3 from sqlalchemy import create_engine, and_
4 from sqlalchemy.orm import sessionmaker
6 from sfa.util.config import Config
7 from sfa.util.sfalogging import logger
9 from sqlalchemy import Column, Integer, String, DateTime
10 from sqlalchemy import Table, Column, MetaData, join, ForeignKey
11 import sfa.storage.model as model
13 from sqlalchemy.ext.declarative import declarative_base
14 from sqlalchemy.orm import relationship, backref
17 from sqlalchemy.dialects import postgresql
19 from sqlalchemy import MetaData, Table
20 from sqlalchemy.exc import NoSuchTableError
22 from sqlalchemy import String
# Column layout of the slice_senslab table: maps each column name to the
# SQL type declaration used when the table is created.
slice_table = {
    'record_id_user': 'integer PRIMARY KEY references X ON DELETE CASCADE ON UPDATE CASCADE',
    'oar_job_id': 'integer DEFAULT -1',
    'record_id_slice': 'integer',
    'slice_hrn': 'text NOT NULL',
}

# Registry of every senslab-specific table, keyed by table name.
tablenames_dict = {'slice_senslab': slice_table}
31 ##############################
SlabBase = declarative_base()


class SliceSenslab(SlabBase):
    """ORM mapping of the 'slice_senslab' table.

    Associates a slice (identified by its hrn, the primary key) with the
    user record owning it, an optional OAR job id and the list of nodes
    reserved for it.
    """
    __tablename__ = 'slice_senslab'
    #record_id_user = Column(Integer, primary_key=True)
    slice_hrn = Column(String, primary_key=True)
    peer_authority = Column(String, nullable=True)
    record_id_slice = Column(Integer)
    record_id_user = Column(Integer)
    # -1 is the column default: it marks a slice with no OAR job attached
    oar_job_id = Column(Integer, default=-1)
    node_list = Column(postgresql.ARRAY(String), nullable=True)

    def __init__(self, slice_hrn=None, oar_job_id=None, record_id_slice=None,
                 record_id_user=None, peer_authority=None):
        """Store the given field values; every argument is optional.

        FIX: the original assigned slice_hrn twice; the duplicate
        assignment is dropped (behavior unchanged).
        """
        self.record_id_slice = record_id_slice
        self.slice_hrn = slice_hrn
        self.oar_job_id = oar_job_id
        self.record_id_user = record_id_user
        self.peer_authority = peer_authority

    def __repr__(self):
        # NOTE(review): the def/return lines of this method were elided from
        # the reviewed extract; reconstructed around the visible body --
        # confirm against the full file.
        result = "<Record id user =%s, slice hrn=%s, oar_job id=%s,Record id slice =%s node_list =%s peer_authority =%s" % \
            (self.record_id_user, self.slice_hrn, self.oar_job_id,
             self.record_id_slice, self.node_list, self.peer_authority)
        return result

    def dump_sqlalchemyobj_to_dict(self):
        """Return this record as a plain dict.

        FIX: the visible original built the mapping into a local named
        'dict' (shadowing the builtin) and never returned it; the mapping
        is now returned directly.  The 'record_id' key appears to duplicate
        'record_id_slice' -- kept as-is for caller compatibility.
        """
        return {'slice_hrn': self.slice_hrn,
                'peer_authority': self.peer_authority,
                'record_id': self.record_id_slice,
                'record_id_user': self.record_id_user,
                'oar_job_id': self.oar_job_id,
                'record_id_slice': self.record_id_slice,
                'node_list': self.node_list}
81 #class PeerSenslab(SlabBase):
82 #__tablename__ = 'peer_senslab'
83 #peername = Column(String, nullable = False)
84 #peerid = Column( Integer,primary_key=True)
86 #def __init__ (self,peername = None ):
88 #self.peername = peername
92 #result="<Peer id =%s, Peer name =%s" % (self.peerid, self.peername)
    def __init__(self, config, debug=False):
        """Open a SQLAlchemy engine on the senslab database.

        Tries a unix-socket URL first, then falls back to TCP, using the
        credentials found in the SFA config object.

        NOTE(review): several lines of this method are elided from the
        reviewed extract -- in particular the definitions of 'dbname',
        'l_echo' and 'l_echo_pool', and the try/except that presumably
        keeps the first engine that answers.  Confirm against the full file.
        """
        # keep a reference to the declarative base on the instance
        self.sl_base = SlabBase
        # will be created lazily on-demand
        self.slab_session = None
        # the former PostgreSQL.py used the psycopg2 directly and was doing
        #self.connection.set_client_encoding("UNICODE")
        # it's unclear how to achieve this in sqlalchemy, nor if it's needed at all
        # http://www.sqlalchemy.org/docs/dialects/postgresql.html#unicode
        # we indeed have /var/lib/pgsql/data/postgresql.conf where
        # this setting is unset, it might be an angle to tweak that if need be
        # try a unix socket first - omitting the hostname does the trick
        unix_url = "postgresql+psycopg2://%s:%s@:%s/%s"%\
            (config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_PORT,dbname)
        print >>sys.stderr, " \r\n \r\n SLAPOSTGRES INIT unix_url %s" %(unix_url)
        # the TCP fallback method
        tcp_url = "postgresql+psycopg2://%s:%s@%s:%s/%s"%\
            (config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_HOST,config.SFA_DB_PORT,dbname)
        for url in [ unix_url, tcp_url ] :
            # NOTE(review): the original wraps this in a try/except (elided
            # here) and keeps the first engine that can be created.
            self.slab_engine = create_engine (url,echo_pool = l_echo_pool, echo = l_echo)
        # reached only when no URL worked (error-handling lines elided above)
        self.slab_engine=None
        raise Exception,"Could not connect to database"
135 self.slab_engine.execute ("select 1").scalar()
140 if self.slab_session is None:
141 Session=sessionmaker ()
142 self.slab_session=Session(bind=self.slab_engine)
143 return self.slab_session
148 #Close connection to database
150 if self.connection is not None:
151 self.connection.close()
152 self.connection = None
157 def exists(self, tablename):
159 Checks if the table specified as tablename exists.
164 metadata = MetaData (bind=self.slab_engine)
165 table=Table (tablename, metadata, autoload=True)
168 except NoSuchTableError:
169 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES EXISTS NOPE! tablename %s " %(tablename)
    def createtable(self, tablename ):
        """
        Creates the specified table(s) from the SlabBase declarative
        metadata.

        NOTE(review): 'tablename' is unused -- create_all creates every
        table registered on SlabBase's metadata, not just this one.
        'slab_engine' here is the module-level global defined at the bottom
        of this file, not self.slab_engine -- confirm this is intentional.
        """
        print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES createtable SlabBase.metadata.sorted_tables %s \r\n engine %s" %(SlabBase.metadata.sorted_tables , slab_engine)
        SlabBase.metadata.create_all(slab_engine)
184 #Updates the job_id and the nodes list
185 #The nodes list is never erased.
186 def update_job(self, hrn, job_id= None, nodes = None ):
187 slice_rec = slab_dbsession.query(SliceSenslab).filter_by(slice_hrn = hrn).first()
188 print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES update_job slice_rec %s"%(slice_rec)
189 if job_id is not None:
190 slice_rec.oar_job_id = job_id
191 if nodes is not None :
192 slice_rec.node_list = nodes
193 slab_dbsession.commit()
    def find (self, name = None, filter_dict = None):
        """Look up one slice_senslab record matching filter_dict and return
        it as a plain dict of its main fields.

        NOTE(review): the 'name' parameter is unused in the visible code,
        and the assignment of 'rec' (used at the bottom) sits on a line
        elided from this extract -- confirm against the full file.
        """
        print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_dict %s"%(filter_dict)

        #Filter_by can not handle more than one argument, hence these functions
        def filter_id_user(query, user_id):
            # restrict the query to records owned by this user id
            print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_id_user"
            return query.filter_by(record_id_user = user_id)

        def filter_job(query, job):
            # restrict the query to records attached to this OAR job
            print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_job "
            return query.filter_by(oar_job_id = job)

        def filer_id_slice (query, id_slice):
            # restrict the query to this slice record id
            # ('filer' typo kept: renaming would be a code change)
            print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filer_id_slice"
            return query.filter_by(record_id_slice = id_slice)

        def filter_slice_hrn(query, hrn):
            # restrict the query to this slice hrn
            print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find filter_slice_hrn"
            return query.filter_by(slice_hrn = hrn)

        # dispatch table: record field name -> filter helper above
        extended_filter = {'record_id_user': filter_id_user,
                           'oar_job_id':filter_job,
                           'record_id_slice': filer_id_slice,
                           'slice_hrn': filter_slice_hrn}

        Q = slab_dbsession.query(SliceSenslab)

        if filter_dict is not None:
            for k in filter_dict:
                # NOTE(review): newQ is rebuilt from the ORIGINAL Q on each
                # iteration and never read afterwards, so filters do not
                # accumulate and Q stays unfiltered below -- looks like a
                # bug, left untouched in this documentation-only pass.
                newQ= extended_filter[k](Q, filter_dict[k])

        print>>sys.stderr, "\r\n \t\t FFFFFFFFFFFFFFFFUUUUUUUUFUFUFU!!!!!!!!"
        print>>sys.stderr, " HEEEEEEEEEEEEY %s " %(Q.first())
        # NOTE(review): 'rec' is assigned on a line elided from this view.
        print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find rec %s" %(rec)
        return dict(zip(['record_id_user','oar_job_id', 'record_id_slice','slice_hrn'],[rec.record_id_user,rec.oar_job_id,rec.record_id_slice, rec.slice_hrn]))
235 ##for rec in Q.all():
236 #reclist.append(dict(zip(['record_id_user','oar_job_id', 'record_id_slice','slice_hrn'],[rec.record_id_user,rec.oar_job_id,rec.record_id_slice, rec.slice_hrn])))
# NOTE(review): Config is already imported at the top of this file.
from sfa.util.config import Config

# Module-level singletons: importing this module instantiates SlabDB (and
# therefore connects to the database) as a side effect.  The methods above
# (createtable, update_job, find) use these global names, not instance
# attributes.
slab_alchemy= SlabDB(Config())
slab_engine=slab_alchemy.slab_engine
slab_dbsession=slab_alchemy.session()