import sys

from sqlalchemy import create_engine, and_
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import Table, Column, MetaData, join, ForeignKey
from sqlalchemy import MetaData, Table
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import relationship, backref

from sfa.util.config import Config
from sfa.util.sfalogging import logger

import sfa.storage.model as model
# Columns of the senslab slice table, keyed by column name; each value is
# the SQL type fragment used when creating the table.
slice_table = {
    'record_id_user': 'integer PRIMARY KEY references X ON DELETE CASCADE ON UPDATE CASCADE',
    'oar_job_id': 'integer DEFAULT -1',
    'record_id_slice': 'integer',
    'slice_hrn': 'text NOT NULL',
}

# All the senslab-specific tables, keyed by table name.
tablenames_dict = {'slice_senslab': slice_table}
##############################
# Declarative base shared by every senslab-specific ORM table; its metadata
# is what createtable() hands to create_all().
SlabBase = declarative_base()
class SliceSenslab(SlabBase):
    """ORM mapping for the senslab-specific 'slice_senslab' table.

    Associates a slice (hrn + record id) with the user owning it and with
    the OAR job currently running for it (-1 when none).
    """
    __tablename__ = 'slice_senslab'
    # record id of the user owning the slice -- primary key
    record_id_user = Column(Integer, primary_key=True)
    # OAR job id attached to the slice; -1 means no job
    oar_job_id = Column(Integer, default=-1)
    # record id of the slice itself
    record_id_slice = Column(Integer)
    slice_hrn = Column(String, nullable=False)

    def __init__(self, slice_hrn=None, oar_job_id=None, record_id_slice=None,
                 record_id_user=None):
        self.record_id_slice = record_id_slice
        # fix: the original assigned slice_hrn twice; once is enough
        self.slice_hrn = slice_hrn
        self.oar_job_id = oar_job_id
        self.record_id_user = record_id_user

    # fix: the original chunk carried this body without a 'def __repr__'
    # header and without returning the built string
    def __repr__(self):
        result = "<Record id user =%s, slice hrn=%s, oar_job id=%s,Record id slice =%s" % \
            (self.record_id_user, self.slice_hrn, self.oar_job_id, self.record_id_slice)
        return result
62 #class PeerSenslab(SlabBase):
63 #__tablename__ = 'peer_senslab'
64 #peername = Column(String, nullable = False)
65 #peerid = Column( Integer,primary_key=True)
67 #def __init__ (self,peername = None ):
69 #self.peername = peername
73 #result="<Peer id =%s, Peer name =%s" % (self.peerid, self.peername)
78 def __init__(self,config):
79 self.sl_base = SlabBase
82 # will be created lazily on-demand
83 self.slab_session = None
84 # the former PostgreSQL.py used the psycopg2 directly and was doing
85 #self.connection.set_client_encoding("UNICODE")
86 # it's unclear how to achieve this in sqlalchemy, nor if it's needed at all
87 # http://www.sqlalchemy.org/docs/dialects/postgresql.html#unicode
88 # we indeed have /var/lib/pgsql/data/postgresql.conf where
89 # this setting is unset, it might be an angle to tweak that if need be
90 # try a unix socket first - omitting the hostname does the trick
91 unix_url = "postgresql+psycopg2://%s:%s@:%s/%s"%\
92 (config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_PORT,dbname)
93 print >>sys.stderr, " \r\n \r\n SLAPOSTGRES INIT unix_url %s" %(unix_url)
94 # the TCP fallback method
95 tcp_url = "postgresql+psycopg2://%s:%s@%s:%s/%s"%\
96 (config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_HOST,config.SFA_DB_PORT,dbname)
97 for url in [ unix_url, tcp_url ] :
99 self.slab_engine = create_engine (url,echo_pool=True,echo=True)
105 self.slab_engine=None
106 raise Exception,"Could not connect to database"
109 self.slab_engine.execute ("select 1").scalar()
113 if self.slab_session is None:
114 Session=sessionmaker ()
115 self.slab_session=Session(bind=self.slab_engine)
116 return self.slab_session
121 #Close connection to database
123 if self.connection is not None:
124 self.connection.close()
125 self.connection = None
130 def exists(self, tablename):
132 Checks if the table specified as tablename exists.
137 metadata = MetaData (bind=self.slab_engine)
138 table=Table (tablename, metadata, autoload=True)
141 except NoSuchTableError:
142 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES EXISTS NOPE! tablename %s " %(tablename)
146 def createtable(self, tablename ):
148 Creates the specifed table. Uses the global dictionnary holding the tablenames and
153 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES createtable SlabBase.metadata.sorted_tables %s \r\n engine %s" %(SlabBase.metadata.sorted_tables , slab_engine)
154 SlabBase.metadata.create_all(slab_engine)
158 def update_job(self, job_id, hrn):
159 slice_rec = slab_dbsession.query(SliceSenslab).filter_by(slice_hrn = hrn).first()
160 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES update_job slice_rec %s"%(slice_rec)
161 slice_rec.oar_job_id = job_id
162 slab_dbsession.commit()
164 def find (self, name = None, filter_dict = None):
165 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_dict %s"%(filter_dict)
167 #Filter_by can not handle more than one argument, hence these functions
168 def filter_id_user(query, user_id):
169 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_id_user"
170 return query.filter_by(record_id_user = user_id)
172 def filter_job(query, job):
173 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_job "
174 return query.filter_by(oar_job_id = job)
176 def filer_id_slice (query, id_slice):
177 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filer_id_slice"
178 return query.filter_by(record_id_slice = id_slice)
180 def filter_slice_hrn(query, hrn):
181 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_slice_hrn"
182 return query.filter_by(slice_hrn = hrn)
185 extended_filter = {'record_id_user': filter_id_user,
186 'oar_job_id':filter_job,
187 'record_id_slice': filer_id_slice,
188 'slice_hrn': filter_slice_hrn}
190 Q = slab_dbsession.query(SliceSenslab)
192 if filter_dict is not None:
193 for k in filter_dict:
195 newQ= extended_filter[k](Q, filter_dict[k])
198 print>>sys.stderr, "\r\n \t\t FFFFFFFFFFFFFFFFUUUUUUUUFUFUFU!!!!!!!!"
199 print>>sys.stderr, " HEEEEEEEEEEEEY %s " %(Q.all())
202 reclist.append(dict(zip(['record_id_user','oar_job_id', 'record_id_slice','slice_hrn'],[rec.record_id_user,rec.oar_job_id,rec.record_id_slice, rec.slice_hrn])))
203 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find reclist %s" %(reclist)
from sfa.util.config import Config

# Module-level singletons: importing this module opens a connection to the
# senslab database (SlabDB.__init__ raises when it cannot connect) and
# exposes a shared engine and session used by the methods above.
slab_alchemy= SlabDB(Config())
slab_engine=slab_alchemy.slab_engine
slab_dbsession=slab_alchemy.session()