Skip to content
Snippets Groups Projects
Commit 8850ea55 authored by Stephan Philips's avatar Stephan Philips
Browse files

update readme

parent 2c9f3733
No related branches found
No related tags found
No related merge requests found
......@@ -183,3 +183,43 @@ ds.m1.x.name
```
# Browsing for data
# Developer information
## Database structure
The user has two configurations to operate in,
* local database
* local database + server side database (network connection needed)
The server-side configuration offers the advantage that you can access the data of your experiments from any device. The server side is combined with a local database, in order to be able to keep operating when the network is down.
When operating only locally, no features are lost. Mainly, you don't have a backup and you can't access the data from a device that is not your measurement computer.
To accommodate a structure that can dynamically switch between both modes, the following tables are made:
### Client side
* global_measurement_overview : contains all the local measurements performed. Used for queries when the server is unavailable (e.g. no network).
* projects_set_up_sample table : local table containing all the data of the current sample being measured. These tables are kept on the system, even if you switch samples (the data linked to them might be removed if you set a storage limit (only data that has already been moved to the server))
* project_set_up_sample overview : simple table summarizing all the projects_set_up_sample on the client side
* projects_set_up_sample_id tables : tables containing the raw data of all the local experiments
### Server side
* global_measurement_overview : same as for client side, only much bigger (this will have consequences, see further)
* projects_set_up_sample : synced version of the client side (see conflicts section for more information on how conflicts between two set ups measuring on the same sample at the same time are handled.)
* project_set_up_sample overview : simple table summarizing all the projects_set_up_sample on the server side
* projects_set_up_sample_id tables : tables containing the raw data of all experiments.
Ensure scalability: fast searches. In order to sort through less data in searches, additional tables are built, containing views of the global_measurement_overview table (note that these are physical tables and not views).
* project table
* set_up table
* project_set_up table
* project_set_up_sample table
General question:
* should the measured data be in a different schema?
### Conflict resolution
When looking at this scheme there is one error that can be made and is irritating to solve. Two systems write to the same table locally and both tables get out of sync --> measurement ID's of one of the two systems have to be adjusted in order for the tables to be able to merge.
import numpy as np
class buffer_writer:
class buffer_reference:
    '''
    Lightweight handle a user can take as a copy of the reader/writer.

    Stores a raw buffer together with a transformation (``buffer_lambda``)
    that is applied lazily every time ``data`` is accessed, so views such as
    reshapes, slices or averages never mutate the underlying buffer.
    '''
    def __init__(self, data):
        # raw underlying buffer (e.g. a numpy array)
        self.buffer = data
        # default transform: identity (return the buffer untouched)
        self.buffer_lambda = buffer_reference.__empty_lambda

    @property
    def data(self):
        '''The buffer with the currently installed transform applied.'''
        return self.buffer_lambda(self.buffer)

    @staticmethod
    def __empty_lambda(data):
        # identity transform used until a view is requested
        return data

    @staticmethod
    def averaging_lambda(dim):
        '''Build a transform averaging the buffer along axis ``dim``.'''
        def average_view(data):
            return np.average(data, axis=dim)
        return average_view

    @staticmethod
    def slice_lambda(args):
        '''Build a transform slicing the buffer with the items in ``args``.'''
        def slice_view(data):
            return data[tuple(args)]
        return slice_view

    @staticmethod
    def reshaper(shape):
        '''Build a transform reshaping the buffer to ``shape``.'''
        def reshape_view(data):
            return data.reshape(shape)
        return reshape_view
class buffer_writer(buffer_reference):
def __init__(self, SQL_conn, input_buffer):
    '''
    Wrap ``input_buffer`` for streaming writes into a database large object.

    Args:
        SQL_conn : open database connection exposing ``lobject``
        input_buffer (ndarray) : array that will hold the measurement data
    '''
    self.conn = SQL_conn
    # keep a flat view for sequential writing; the original shape is
    # restored lazily via the reshaper transform on access
    self.buffer = input_buffer.ravel()
    self.buffer_lambda = buffer_reference.reshaper(input_buffer.shape)
    # NOTE(review): presumably oid=0 with 'w' creates a fresh large object
    # (psycopg2 convention) -- confirm against the driver docs
    self.lobject = self.conn.lobject(0,'w')
    self.oid = self.lobject.oid
......@@ -59,12 +95,13 @@ class buffer_writer:
# reset writing position
self.lobject.seek(self.cursor*8)
class buffer_reader:
class buffer_reader(buffer_reference):
def __init__(self, SQL_conn, oid, shape):
    '''
    Attach to an existing large object and prepare a local buffer for it.

    Args:
        SQL_conn : open database connection exposing ``lobject``
        oid (int) : object id of the large object holding the data
        shape (tuple) : expected shape of the measurement data
    '''
    self.conn = SQL_conn
    # flat NaN-filled buffer: points not yet synced from the server stay NaN
    self.buffer = np.full(shape, np.nan).ravel()
    self.buffer_lambda = buffer_reference.reshaper(shape)
    self.oid = oid
    self.lobject = self.conn.lobject(oid,'rb')
......@@ -91,30 +128,3 @@ class buffer_reader:
data = np.frombuffer(binary_data)
self.buffer.flat[start:start+data.size] = data
class buffer_reference:
    '''
    Handle a user can take as a copy of the reader/writer.

    ``_buffer`` holds the raw data; ``buffer_lambda`` is a transform applied
    on every access of the ``buffer`` property, so derived views (slices,
    averages) are computed lazily and never mutate the raw data.
    '''
    def __init__(self, data):
        self._buffer = data
        # start with the identity transform
        self.buffer_lambda = buffer_reference.__empty_lambda

    @property
    def buffer(self):
        '''The raw data with the currently installed transform applied.'''
        return self.buffer_lambda(self._buffer)

    @staticmethod
    def __empty_lambda(data):
        # default transform: return the data unchanged
        return data

    @staticmethod
    def averaging_lambda(dim):
        '''Build a transform averaging over axis ``dim``.'''
        def average_view(data):
            return np.average(data, axis=dim)
        return average_view

    @staticmethod
    def slice_lambda(args):
        '''Build a transform slicing with the items in ``args``.'''
        def slice_view(data):
            return data[tuple(args)]
        return slice_view
\ No newline at end of file
# postgrestables to make
###############
# server side #
###############
# overview_projects_set_ups_samples table
# generic_data_table
### needed for fast searches
# project table
# set up table
# project_set_up table
# project setup sample table
###############
# client side #
###############
# local general table (fallback table) -- field synced -- create index
# overview_projects_set_ups_samples table local (fallback table)
# project setup sample tables that have been generated locally
#########################
# irritating scenario's #
#########################
# two systems are writing to the same table locally, both tables get out of sync
# --> measurement ID's of one of the two systems have to be adjusted in order for the tables to be able to merge.
......@@ -21,7 +21,7 @@ def create_new_data_set(experiment_name, *m_params):
m_param.init_data_dataclass()
ds.measurement_parameters += [m_param]
ds.measurement_parameters_raw += m_param.to_SQL_data_structure()
print(ds.measurement_parameters_raw)
SQL_mgr = SQL_database_manager()
SQL_mgr.register_measurement(ds)
......@@ -41,7 +41,6 @@ class data_set_desciptor(object):
return getattr(getattr(obj,"_data_set__data_set_raw"), self.var)
class data_set:
exp_id = data_set_desciptor('exp_id')
running = data_set_desciptor('uploaded_complete')
dbname = data_set_desciptor('dbname')
......@@ -68,7 +67,6 @@ class data_set:
def __init__(self, ds_raw):
self.id = None
self.__data_set_raw = ds_raw
print(ds_raw)
self.__repr_attr_overview = []
self.__init_properties(m_param_origanizer(ds_raw.measurement_parameters_raw))
self.last_commit = time.time()
......@@ -149,7 +147,7 @@ class data_set:
SQL_mgr.update_write_cursors(self.__data_set_raw)
def __repr__(self):
output_print = "DataSet :: {}\n\nid = {}\nTrueID = 1225565471200\n\n".format(self.name, self.run_id)
output_print = "DataSet :: {}\n\nid = {}\nTrueID = 1225565471200\n\n".format(self.name, self.exp_id)
output_print += "| idn | label | unit | size |\n"
output_print += "---------------------------------------------------------------------------\n"
for i in self.__repr_attr_overview:
......
......@@ -109,13 +109,13 @@ class dataset_data_description():
def __call__(self):
if self.__raw_data.setpoint is True or self.__raw_data.setpoint_local is True:
if self.__raw_data.data_buffer.buffer.ndim > 1: #over dimensioned
idx = [0] * self.__raw_data.data_buffer.buffer.ndim
if self.__raw_data.data_buffer.data.ndim > 1: #over dimensioned
idx = [0] * self.__raw_data.data_buffer.data.ndim
idx[self.__raw_data.nth_dim] = slice(None)
return self.__raw_data.data_buffer.buffer[tuple(idx[::-1])]
return self.__raw_data.data_buffer.data[tuple(idx[::-1])]
return self.__raw_data.data_buffer.buffer
return self.__raw_data.data_buffer.data
@property
def shape(self):
......@@ -126,7 +126,7 @@ class dataset_data_description():
return len(self.shape)
def full(self):
return self.__raw_data.data_buffer.buffer
return self.__raw_data.data_buffer.data
def average(self, dim):
'''
......@@ -144,7 +144,7 @@ class dataset_data_description():
raw_data_org_copy = copy.copy(self.__raw_data_org)
raw_data_cpy = raw_data_org_copy.get(self.__raw_data.param_id, self.__raw_data.nth_set)
raw_data_cpy.dependency.pop(dim)
raw_data_cpy.data_buffer.buffer_lambda = raw_data_cpy.data_buffer.averaging_lambda(raw_data_cpy.data_buffer.buffer.ndim-1-dim)
raw_data_cpy.data_buffer.buffer_lambda = raw_data_cpy.data_buffer.averaging_lambda(raw_data_cpy.data_buffer.data.ndim-1-dim)
return dataset_data_description(self.name, raw_data_cpy, raw_data_org_copy)
......@@ -246,8 +246,8 @@ if __name__ == '__main__':
data2 = buffer_reference(np.zeros([100, 100, 10]))
data3 = buffer_reference(np.zeros([10]))
data4 = buffer_reference(np.zeros([100,100]))
data1.buffer[0,:] = -5
data1.buffer[:,0] = 5
data1.data[0,:] = -5
data1.data[:,0] = 5
a = m_param_raw(param_id=1636274596872, nth_set=0, nth_dim=-1, param_id_m_param=1636274596872, setpoint=False, setpoint_local=False, name_gobal='test', name='chan_1', label='keithley 1', unit='pA', dependency=[1635967634696, 1635967635080], shape='[100, 100]', size=100000, oid=16478, data_buffer=data1)
......
......@@ -32,11 +32,11 @@ class Measurement:
'''
class used to describe a measurement.
'''
def __init__(self):
def __init__(self, name):
self.setpoints = dict()
self.m_param = dict()
self.dataset = None
self.name = 'todo'
self.name = name
def register_set_parameter(self, parameter, n_points):
'''
......@@ -200,8 +200,8 @@ if __name__ == '__main__':
x = 100
y = 100
m_param = m4
meas = Measurement()
m_param = m1
meas = Measurement('no name')
meas.register_set_parameter(a1, x)
meas.register_set_parameter(a2, y)
......@@ -232,6 +232,6 @@ if __name__ == '__main__':
# print('results', i ,j, z)
ds.add_result((a1, i), (a2, j), (m_param, z))
print(ds.dataset)
t1 =time.time()
print(meas.dataset)
print(t1-t0)
\ No newline at end of file
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import time
w,h= 500,500
def square():
    # Draw a single 100x100 px quad with corners (100,100)-(200,200)
    # using the legacy immediate-mode pipeline.
    glBegin(GL_QUADS)
    glVertex2f(100, 100)
    glVertex2f(200, 100)
    glVertex2f(200, 200)
    glVertex2f(100, 200)
    glEnd()
def iterate():
    # Reset the viewport and install a 2D orthographic projection mapping
    # world coordinates 1:1 to the 500x500 window pixels.
    glViewport(0, 0, 500, 500)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    glOrtho(0.0, 500, 0.0, 500, 0.0, 1.0)
    glMatrixMode (GL_MODELVIEW)
    glLoadIdentity()
def showScreen():
    # Display callback: clear, rebuild the projection, draw the square in
    # the current color, then present the frame.
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glLoadIdentity()
    iterate()
    # NOTE(review): blue component 3.0 is outside the usual 0..1 range --
    # presumably clamped by GL; confirm intent
    glColor3f(1.0, 0.0, 3.0)
    square()
    glutSwapBuffers()
# GLUT boot sequence: create a 500x500 window and enter the event loop.
glutInit()
glutInitDisplayMode(GLUT_RGBA)
glutInitWindowSize(500, 500)
glutInitWindowPosition(0, 0)
wind = glutCreateWindow("OpenGL Coding Practice")
glutDisplayFunc(showScreen)  # draw callback
glutIdleFunc(showScreen)     # redraw continuously while idle
glutMainLoop()               # blocks; never returns
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment