class SliceSenslab (SlabBase):
__tablename__ = 'slice_senslab'
- record_id_user = Column(Integer, primary_key=True)
+ #record_id_user = Column(Integer, primary_key=True)
+ slice_hrn = Column(String,primary_key=True)
+ peer_authority = Column(String, nullable=True)
+ record_id_slice = Column(Integer)
+ record_id_user = Column(Integer)
oar_job_id = Column( Integer,default = -1)
- record_id_slice = Column(Integer)
- slice_hrn = Column(String,nullable = False)
node_list = Column(postgresql.ARRAY(String), nullable =True)
- def __init__ (self, slice_hrn =None, oar_job_id=None, record_id_slice=None, record_id_user= None):
+ def __init__ (self, slice_hrn =None, oar_job_id=None, record_id_slice=None, record_id_user= None,peer_authority=None):
self.node_list = []
if record_id_slice:
self.record_id_slice = record_id_slice
self.slice_hrn = slice_hrn
if record_id_user:
self.record_id_user= record_id_user
+ if peer_authority:
+ self.peer_authority = peer_authority
def __repr__(self):
- result="<Record id user =%s, slice hrn=%s, oar_job id=%s,Record id slice =%s node_list =%s" % \
- (self.record_id_user, self.slice_hrn, self.oar_job_id, self.record_id_slice, self.node_list)
+ result = "<Record id user =%s, slice hrn=%s, oar_job id=%s,Record id slice =%s node_list =%s peer_authority =%s" % \
+ (self.record_id_user, self.slice_hrn, self.oar_job_id, self.record_id_slice, self.node_list, self.peer_authority)
result += ">"
return result
- def dumpquerytodict(self):
+ def dump_sqlalchemyobj_to_dict(self):
dict = {'slice_hrn':self.slice_hrn,
+ 'peer_authority':self.peer_authority,
'record_id':self.record_id_slice,
'record_id_user':self.record_id_user,
'oar_job_id':self.oar_job_id,
- 'record_id_slice':self.record_id_slice,
- 'slice_hrn':self.slice_hrn,
+ 'record_id_slice':self.record_id_slice,
'node_list':self.node_list}
return dict
#class PeerSenslab(SlabBase):
#return result
class SlabDB:
- def __init__(self,config):
+ def __init__(self, config, debug=False):
self.sl_base = SlabBase
dbname="slab_sfa"
+ # echo / echo_pool make SQLAlchemy log statements and pool checkouts;
+ # enable both only when debugging.
+ l_echo_pool = bool(debug)
+ l_echo = bool(debug)
# will be created lazily on-demand
self.slab_session = None
# the former PostgreSQL.py used the psycopg2 directly and was doing
(config.SFA_DB_USER,config.SFA_DB_PASSWORD,config.SFA_DB_HOST,config.SFA_DB_PORT,dbname)
for url in [ unix_url, tcp_url ] :
try:
- self.slab_engine = create_engine (url,echo_pool=True,echo=True)
+ self.slab_engine = create_engine (url,echo_pool = l_echo_pool, echo = l_echo)
self.check()
self.url=url
return
SlabBase.metadata.create_all(slab_engine)
return
-
+ #Updates the job_id and the nodes list
+ #The nodes list is never erased.
def update_job(self, hrn, job_id= None, nodes = None ):
slice_rec = slab_dbsession.query(SliceSenslab).filter_by(slice_hrn = hrn).first()
print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES update_job slice_rec %s"%(slice_rec)
#reclist = []
##for rec in Q.all():
#reclist.append(dict(zip(['record_id_user','oar_job_id', 'record_id_slice','slice_hrn'],[rec.record_id_user,rec.oar_job_id,rec.record_id_slice, rec.slice_hrn])))
- #print>>sys.stderr, " \r\n \r\n \t SLABPOSTGRES find reclist %s" %(reclist)
#return reclist