# fork download  (paste-site header; not part of the module)
  1. """
  2. Represents a PBS database layout and provides algorithms to sync PBS data
  3. with intm model.
  4.  
  5. API: Sync datewise by passing a date to sync function.
  6.  
  7. """
  8. from __future__ import absolute_import
  9. from django.db import models
  10. from django.db.utils import IntegrityError, DataError
  11. from . import utils, pervasive
  12. import re
  13. import datetime
  14. import sys
  15. import logging
  16. import threading
  17.  
  18. logger = logging.getLogger(__name__)
  19.  
  20.  
  21. class SyncError(Exception):
  22. """
  23. General class for all exceptions in this module
  24. """
  25. def __init__(self, message):
  26. self.message = message
  27.  
  28. def __str__(self):
  29. return repr(self.message)
  30.  
  31.  
  32. def sync(date):
  33. for s in Subsidiary.objects.all():
  34. for m in [Transaction, Receipt, Invoice, Staff, Contractor]:
  35. Sync(m, s, date).sync()
  36.  
  37. if date != datetime.date.today():
  38. Updated.objects.get_or_create(date=date)[0].save()
  39.  
  40.  
  41. class Sync(object):
  42.  
  43. def __init__(self, model, subsidiary, date):
  44.  
  45. self.pbs = PBS(
  46. pk = model.PBSID,
  47. pk_format = model.PBSID_FORMAT,
  48. fieldmap = model.PBS_FIELDS,
  49. table_name = model.PBS_TABLE,
  50. dsn = (subsidiary.primary_dsn, subsidiary.replica_dsn),
  51. date_filter = {model.DATE_FIELD: date}
  52. )
  53.  
  54. self.date = date
  55. self.model = model
  56. self.subsidiary = subsidiary
  57.  
  58. self._intm = model.objects.order_by(model.PBSID)
  59.  
  60. if self.model.SUBSIDIARY_FIELD:
  61. self._intm = self._intm.filter(**{self.model.SUBSIDIARY_FIELD:self.subsidiary})
  62.  
  63. self.intm = self._intm
  64.  
  65. self.set_date(self.date)
  66.  
  67.  
  68. def sync(self):
  69. logger.info("sync: %s, %s, %s"
  70. % (self.model.__name__, self.subsidiary, self.date))
  71.  
  72. if not self.subsidiary.main and self.model.MAIN_SUBSIDIARY_ONLY:
  73. logger.info("skipping: %s only syncs with main subsidiary" % self.model.__name__)
  74. return
  75.  
  76. self.clean()
  77. self.pair()
  78. self.fill(1)
  79.  
  80.  
  81. def set_date(self, date):
  82. if self.model.DATE_FIELD:
  83. self.pbs.set_query({self.model.DATE_FIELD:date})
  84. self.intm = self._intm.filter(**{self.model.DATE_FIELD:date})
  85.  
  86.  
  87. def clean(self):
  88. """
  89. Filling in dates on entries the has None on expected date fields.
  90. """
  91. if not self.model.DATE_FIELD:
  92. return
  93.  
  94. self.set_date(None)
  95.  
  96. logger.info("found %s undated rows on %s/%s" % (len(self.intm), self.subsidiary, self.model.__name__))
  97.  
  98. while len(self.intm):
  99. pbsid_range = next(self.chunkwise(self.intm, self.pbs.query_length(), True))
  100.  
  101. self.update(pbsid_range, by_queryset=True)
  102.  
  103. self.set_date(self.date)
  104.  
  105.  
  106. def pair(self):
  107. """
  108. Pulls entries from PBS and adds to intm and, if necessary, deletes entries from intm that doesn't
  109. exist in PBS.
  110. """
  111.  
  112. if not self.model.LINEAR_INDEX:
  113. return 0
  114.  
  115. logger.info("running add on %s/%s" % (self.subsidiary, self.model.__name__))
  116.  
  117. found_rows = 1
  118. s = 0
  119.  
  120. while found_rows:
  121.  
  122. pbsid_range, overflow = self.first_mismatch(threshold=self.pbs.query_length())
  123.  
  124. if overflow:
  125. intm = self.intm_from_range(pbsid_range)
  126.  
  127. logger.warning("deleting %s unmatched rows in %s (first_pbsid, last_pbsid)" % (len(intm), str(pbsid_range)))
  128.  
  129. intm.delete()
  130.  
  131. return self.pair(s=s)
  132.  
  133. update_duplicates = pbsid_range[1] != None
  134. found_rows, added_rows, last_pbsid = self.add(pbsid_range, update_duplicates=update_duplicates)
  135.  
  136. s += added_rows
  137.  
  138. return s
  139.  
  140.  
  141. def fill(self, fields):
  142. """
  143. Updates entries where aggregation doesn't match.
  144. """
  145. for field in self.model.MATCH_FIELDS[0:fields]:
  146.  
  147. count = not self.model.PBS_FIELDS[field][2]
  148.  
  149. if self.model.LINEAR_INDEX:
  150. intm_value, pbs_value = self.diff(
  151. field = field,
  152. pbsid_range = (None, None),
  153. by_queryset = False,
  154. count = count,
  155. exclude_empty = True,
  156. )
  157.  
  158. if abs(intm_value - pbs_value) < 0.01:
  159. continue
  160.  
  161. for pbsid_range in self.chunkwise(intm=self.intm, step=self.pbs.query_length(), closed=True):
  162. intm_value, pbs_value = self.diff(
  163. field = field,
  164. pbsid_range = pbsid_range,
  165. by_queryset = True,
  166. count = count,
  167. exclude_empty = True,
  168. )
  169.  
  170. if abs(intm_value - pbs_value) < 0.01 or (count and intm_value > pbs_value):
  171. continue
  172.  
  173. self.update(pbsid_range, by_queryset=True)
  174.  
  175.  
  176. def first_mismatch(self, threshold):
  177. """
  178. Finds and returns the first mismatch between PBS and intm and returns the position and mismatch type
  179. in the format ((first_pbsid, last_pbsid), overflow).
  180.  
  181. first/last pbsid gives the range where the mismatch is located and overflow is True if the number of
  182. elements in intm is greater than in pbs.
  183. """
  184.  
  185. min_index = 0
  186. max_index = len(self.intm) - 1
  187.  
  188. if max_index < 0:
  189. return ((None, None), False)
  190.  
  191. # set arguments for diff to strictly count elements
  192. diff_args = {
  193. 'field': self.model.COUNTER_FIELD,
  194. 'by_queryset': False,
  195. 'count': True,
  196. 'exclude_empty': False,
  197. }
  198.  
  199. # Upper edge case: Entries are synced up to last intm entry
  200. pbsid_range = (None, self.pbs.pbsid_from_intm(self.intm[max_index]))
  201. intm_count, pbs_count = self.diff(pbsid_range=pbsid_range, **diff_args)
  202.  
  203. if abs(intm_count - pbs_count) < 0.01:
  204. return ((pbsid_range[1], None), False)
  205.  
  206. # Lower edge case: Entries are unsynced before first intm entry
  207. pbsid_range = (None, self.pbs.pbsid_from_intm(self.intm[min_index]))
  208. intm_count, pbs_count = self.diff(pbsid_range=pbsid_range, **diff_args)
  209.  
  210. if abs(intm_count - pbs_count) >= 0.01:
  211. return ((None, pbsid_range[1]), intm_count > pbs_count)
  212.  
  213. while True:
  214. mid_index = min_index + (max_index - min_index)/2
  215.  
  216. pbsid_range = (None, self.pbs.pbsid_from_intm(self.intm[mid_index]))
  217. intm_count, pbs_count = self.diff(pbsid_range=pbsid_range, **diff_args)
  218.  
  219. diff = abs(intm_count - pbs_count) >= 0.01
  220.  
  221. min_index, max_index = (min_index, mid_index) if diff else (mid_index, max_index)
  222.  
  223. if diff and (max_index - min_index <= threshold):
  224. return (
  225. (self.pbs.pbsid_from_intm(self.intm[min_index]), self.pbs.pbsid_from_intm(self.intm[max_index])),
  226. intm_count > pbs_count,
  227. )
  228.  
  229.  
  230. def add(self, pbsid_range, update_duplicates):
  231. """
  232. Adds a result set to intm and returns number of inserted items.
  233.  
  234. Utilizes django's bulk_create method which will raise IntegrityError
  235. if unique fields are violated.
  236.  
  237. If the length of the created rows matches CHUNK_SIZE, add will assume that
  238. next add could be a full chunk too and spawns off a thread based on that
  239. assumption.
  240. """
  241.  
  242. logger.debug("fetching %s (first_pbsid, last_pbsid)" % str(pbsid_range))
  243.  
  244. result = self.pbs.fetch_by_range(pbsid_range, include_endpoints=False)
  245.  
  246. logger.debug("adding %s rows, %s (first_pbsid, last_pbsid), Filter duplicates:%s"
  247. % (len(result), str(pbsid_range), update_duplicates))
  248.  
  249. bulk_create = []
  250.  
  251. r = []
  252. count = 0
  253.  
  254. for r in result:
  255. fields = self.cast_row(r)
  256.  
  257. if update_duplicates:
  258. count = self.intm_from_range(
  259. pbsid_range=(fields[self.model.PBSID], fields[self.model.PBSID]),
  260. intm=self.model.objects.filter(subsidiary=self.subsidiary)
  261. ).update(**fields)
  262.  
  263. if count == 0:
  264. bulk_create.append(self.model(subsidiary=self.subsidiary, **fields))
  265.  
  266.  
  267. self.model.objects.bulk_create(bulk_create)
  268.  
  269. self.intm.update()
  270.  
  271. logger.debug("added %s/%s (added/result) rows, %s (first_pbsid, last_pbsid)"
  272. % (len(bulk_create), len(result), str(pbsid_range)))
  273.  
  274. return len(result), len(bulk_create), r[self.model.PBSID] if r else None
  275.  
  276.  
  277. def update(self, pbsid_range, by_queryset):
  278. """
  279. Updates the fields in a query set by requesting the identifiers in intm from PBS
  280. and return the number of rows matched (not changed).
  281.  
  282. """
  283. logger.debug("fetching updates %s (first_pbsid, last_pbsid)" % str(pbsid_range))
  284. intm = self.intm_from_range(pbsid_range, self.intm)
  285.  
  286. if by_queryset:
  287. result = self.pbs.fetch_by_queryset(intm)
  288. else:
  289. result = self.pbs.fetch_by_range(pbsid_range, include_endpoints=True)
  290.  
  291. s = 0
  292. for r in result:
  293. fields = self.cast_row(r)
  294. count = self.intm_from_range((fields[self.model.PBSID], fields[self.model.PBSID]), self.intm).update(**fields)
  295.  
  296. if count > 1:
  297. logger.warning("updated %s rows on id %s" % (count, fields[self.model.PBSID]))
  298. if count < 1:
  299. pass
  300.  
  301. s += count
  302.  
  303. self.intm.update()
  304.  
  305. logger.debug("updated %s/%s (updated/data) rows, %s (first_pbsid, last_pbsid)"
  306. % (s, len(result), str(pbsid_range)))
  307.  
  308. return s
  309.  
  310.  
  311. def diff(self, field, pbsid_range, by_queryset, count, exclude_empty):
  312. intm = self.intm_from_range(pbsid_range, self.intm)
  313.  
  314. if by_queryset:
  315. pbs_value = self.pbs.aggregate_by_queryset(field, intm, count, exclude_empty)
  316. else:
  317. pbs_value = self.pbs.aggregate_by_range(field, pbsid_range, count, exclude_empty)
  318.  
  319. if count:
  320. if exclude_empty:
  321. intm_value = len(intm.exclude(**{field: None}))
  322. else:
  323. intm_value = len(intm)
  324. else:
  325. intm_value = intm.aggregate(models.Sum(field))[field + '__sum']
  326.  
  327. intm_value = intm_value or 0
  328. pbs_value = pbs_value or 0
  329. diff = intm_value - pbs_value
  330.  
  331. logger.debug("diff: %s-%s=%s (intm - pbs = diff)" % (intm_value, pbs_value, diff))
  332.  
  333. return intm_value, pbs_value
  334.  
  335.  
  336. def chunkwise(self, intm, step, closed):
  337. min_index = 0
  338. max_index = len(intm) - 1
  339.  
  340. if not closed:
  341. min_index -= 1
  342. max_index += 1
  343.  
  344. if max_index < 0:
  345. raise StopIteration
  346.  
  347. """
  348. If step evaluates to false, the entire queryset should be returned.
  349. Setting step to max_index will render a full closed interval
  350. while adding 2 will render a full open interval (None, None)
  351. """
  352. step = step or (max_index + (0 if closed else 2))
  353.  
  354. i = min_index
  355.  
  356. while i <= max_index:
  357. j = min(i + step, max_index)
  358.  
  359. first_pbsid = None
  360. if i != -1:
  361. first_pbsid = self.pbs.pbsid_from_intm(intm[i])
  362.  
  363. last_pbsid = None
  364. if j != len(intm):
  365. last_pbsid = self.pbs.pbsid_from_intm(intm[j])
  366.  
  367. yield (first_pbsid, last_pbsid)
  368.  
  369. i += step + (1 if closed else 0)
  370.  
  371.  
  372. def intm_from_range(self, pbsid_range, intm):
  373. f = {}
  374.  
  375. if not pbsid_range[0] == None:
  376. f[self.model.PBSID + '__gte'] = pbsid_range[0]
  377.  
  378. if not pbsid_range[1] == None:
  379. f[self.model.PBSID + '__lte'] = pbsid_range[1]
  380.  
  381. return intm.filter(**f)
  382.  
  383.  
  384. def cast(self, f, v):
  385. try:
  386. c = self.model.PBS_FIELDS[f][1](v, subsidiary=self.subsidiary)
  387. except DataError as e:
  388. logger.error("bad identifier %s for field %s/%s (cast: %s)" %
  389. (v, f, self.model.PBS_FIELDS[f][0], self.model.PBS_FIELDS[f][1].__name__))
  390. raise e
  391.  
  392. return c
  393.  
  394.  
  395. def cast_row(self, r):
  396. return {c:self.cast(c, r[c]) for c in r}
  397.  
  398.  
  399. class PBS(object):
  400.  
  401. CHUNK_SIZE = 1000
  402.  
  403. def __init__(self, pk, pk_format, fieldmap, table_name, dsn, date_filter):
  404. self.fieldmap = fieldmap
  405. self.table_name = table_name
  406. self.pk = pk
  407. self.pk_format = pk_format
  408.  
  409. self.__dsn = None
  410. self.__query = None
  411. self.__query_dateless = None
  412.  
  413. self.__cache = {}
  414. self.__thread_pool = {}
  415.  
  416. self.set_dsn(dsn, date_filter)
  417. self.set_query(date_filter)
  418.  
  419.  
  420. def fetch_by_queryset(self, intm):
  421. """
  422. Fetch from PBS based on intm-queryset by adding "in (id_1, id_2, ...)" to the query.
  423. """
  424. q = self.__query % self.query_fields()
  425. q += " and %s" % (self.query_filter(intm))
  426. q += " order by %s asc" % self.pk
  427.  
  428. result = pervasive.query(self.__dsn, q)
  429.  
  430. logger.debug("fetched %s/%s rows" % (len(result), len(intm)))
  431.  
  432. return result
  433.  
  434.  
  435. def fetch_by_range(self, pbsid_range, include_endpoints):
  436. """
  437. Returns all items in the PBS table within pbsid_range.
  438.  
  439. """
  440.  
  441. cached_result = self.get_cached_fetch(pbsid_range)
  442.  
  443. if cached_result:
  444. return cached_result
  445.  
  446. q = self.__query % "top %i %s" % (PBS.CHUNK_SIZE, self.query_fields())
  447.  
  448. if pbsid_range[0] != None:
  449. q += " and %s >%s '%s'" % (self.pk, ('=' if include_endpoints else ''), pbsid_range[0])
  450.  
  451. if pbsid_range[1] != None:
  452. q += " and %s <%s '%s'" % (self.pk, ('=' if include_endpoints else ''), pbsid_range[1])
  453.  
  454. q += " order by %s asc" % self.pk
  455.  
  456. result = pervasive.query(self.__dsn, q)
  457.  
  458. logger.debug("fetched %s rows on %s (first_pbsid, last_pbsid)" % (len(result), str(pbsid_range)))
  459.  
  460. return result
  461.  
  462.  
  463. def aggregate_by_queryset(self, field, intm, count, exclude_empty):
  464. if count:
  465. aggregate = 'count'
  466. cast = int
  467. else:
  468. aggregate = 'sum'
  469. cast = self.fieldmap[field][1]
  470.  
  471. q = self.__query % ("%s(%s) as c" % (aggregate, self.fieldmap[field][0]))
  472.  
  473. q += " and %s" % self.query_filter(intm)
  474.  
  475. if exclude_empty:
  476. q += " and %s <> ''" % self.fieldmap[field][0]
  477.  
  478. try:
  479. s = cast(pervasive.query(self.__dsn, q)[0]['c'])
  480. except IndexError:
  481. s = 0
  482.  
  483. logger.debug("aggregate (%s) field %s = %s (len(intm)=%s)"
  484. % (aggregate, field, s, len(intm)))
  485.  
  486. return s
  487.  
  488.  
  489. def aggregate_by_range(self, field, pbsid_range, count, exclude_empty):
  490.  
  491. if count:
  492. aggregate = 'count'
  493. cast = int
  494. else:
  495. aggregate = 'sum'
  496. cast = self.fieldmap[field][1]
  497.  
  498. q = self.__query % ("%s(%s) as c" % (aggregate, self.fieldmap[field][0]))
  499.  
  500. if pbsid_range[0] != None:
  501. q += " and %s >= '%s'" % (self.fieldmap[self.pk][0], pbsid_range[0])
  502.  
  503. if pbsid_range[1] != None:
  504. q += " and %s <= '%s'" % (self.fieldmap[self.pk][0], pbsid_range[1])
  505.  
  506. if exclude_empty:
  507. q += " and %s <> ''" % self.fieldmap[field][0]
  508.  
  509. try:
  510. s = cast(pervasive.query(self.__dsn, q)[0]['c'])
  511. except IndexError:
  512. s = 0
  513.  
  514. logger.debug("aggregate (%s) field %s = %s, %s (first_pbsid, last_pbsid)" % (aggregate, field, s, str(pbsid_range)))
  515.  
  516. return s
  517.  
  518.  
  519. def cache_fetch(self, pbsid_range):
  520. logger.debug("caching rows on %s (first_pbsid, last_pbsid)" % str(pbsid_range))
  521.  
  522. result = self.fetch_by_range(pbsid_range, include_endpoints=False)
  523.  
  524. self.__cache['fetch'] = {
  525. 'pbsid_range': pbsid_range,
  526. 'result': result,
  527. }
  528.  
  529. logger.debug("cached %s rows on %s (first_pbsid, last_pbsid)" % (len(result), str(pbsid_range)))
  530.  
  531.  
  532. def get_cached_fetch(self, pbsid_range):
  533. result = []
  534.  
  535. if 'fetch' in self.__thread_pool and self.__thread_pool['fetch'].is_alive():
  536. logger.debug("waiting for thread %s" % self.__thread_pool['fetch'])
  537. if self.__thread_pool['fetch'] != threading.current_thread():
  538. self.__thread_pool['fetch'].join()
  539. logger.debug("joined with thread %s" % self.__thread_pool['fetch'])
  540. else:
  541. logger.debug("skipped join with thread %s" % self.__thread_pool['fetch'])
  542.  
  543. try:
  544. if self.__cache['fetch']['pbsid_range'] == pbsid_range:
  545. result = self.__cache['fetch']['result']
  546. except KeyError:
  547. pass
  548.  
  549. logger.debug("got cache %s rows on %s (first_pbsid, last_pbsid)" % (len(result), str(pbsid_range)))
  550.  
  551. return result
  552.  
  553.  
  554. def pbsid_from_intm(self, intm):
  555. pbsid = self.pk_format.format(getattr(intm, self.pk))
  556. return pbsid
  557.  
  558.  
  559. def range_from_intm(self, intm):
  560. return (self.pbsid_from_intm(intm.first()), self.pk_from_intm(intm.last()))
  561.  
  562.  
  563. def set_query(self, date):
  564. date_field = date.keys()[0]
  565. date = date[date_field]
  566.  
  567. query = "select %s " + "from %s" % self.table_name
  568.  
  569. if date_field != None and date !=None:
  570. self.__query = query + " where %s='%s'" % (
  571. self.fieldmap[date_field][0],
  572. utils.date_to_str(date) if date else '',
  573. )
  574. else:
  575. self.__query = query + " where 1=1"
  576.  
  577. logger.debug("query skeleton set to '%s'" % self.__query)
  578.  
  579.  
  580. def set_dsn(self, dsn, date):
  581. date_field = date.keys()[0]
  582. date = date[date_field]
  583.  
  584. self.__dsn = (
  585. dsn[1]
  586. if date != datetime.date.today()
  587. and dsn[1]
  588. else dsn[0]
  589. )
  590.  
  591.  
  592. def query_fields(self):
  593. return ','.join("%s as '%s'"
  594. % (self.fieldmap[f][0], f) for f in self.fieldmap if self.fieldmap[f][0])
  595.  
  596.  
  597. def query_filter(self, intm):
  598. identifiers = [self.pk_format.format(getattr(i, self.pk)) for i in intm]
  599.  
  600. return "%s in ('%s')" % (
  601. self.fieldmap[self.pk][0],
  602. "','".join(identifiers)
  603. )
  604.  
  605.  
  606. def query_length(self):
  607. return (pervasive.MAX_QUERY_LENGTH - 800)/(len(self.pk_format.format(0)) + 3)
  608.  
  609.  
  610. class Subsidiary(models.Model):
  611. name = models.CharField(max_length=40, unique=True)
  612. main = models.BooleanField(default=False)
  613. primary_dsn = models.CharField(max_length=10, unique=True)
  614. replica_dsn = models.CharField(max_length=10, unique=True, null=True, blank=True)
  615.  
  616. def __unicode__(self):
  617. return self.name
  618.  
  619.  
  620. class Item(models.Model):
  621. identifier = models.CharField(max_length=25, unique=True)
  622.  
  623. @classmethod
  624. def get_or_create(C, value, **context):
  625. identifier = utils.strip_or_none(value)
  626.  
  627. if identifier == None:
  628. return None
  629.  
  630. return C.objects.get_or_create(identifier=identifier)[0]
  631.  
  632. def __unicode__(self):
  633. return self.identifier
  634.  
  635.  
  636. class Staff(models.Model):
  637. class Meta:
  638. ordering = ['name', 'identifier']
  639.  
  640. COUNTER_FIELD = 'identifier'
  641. MATCH_FIELDS = ['name', 'display_name']
  642. DATE_FIELD = None
  643. SUBSIDIARY_FIELD = None
  644. LINEAR_INDEX = False
  645. MAIN_SUBSIDIARY_ONLY = True
  646.  
  647. PBSID = 'identifier'
  648. PBSID_FORMAT = u'{0: <4}'
  649.  
  650. PBS_TABLE = 'pupers'
  651. PBS_FIELDS = {
  652. 'identifier': ('d2601', utils.strip_or_none, False),
  653. 'name': ('d2621', utils.strip_or_none, False),
  654. 'display_name': ('d2681', utils.strip_or_none, False),
  655. 'is_seller': ('d2701', utils.eight_bit_to_bool, False),
  656. }
  657.  
  658. identifier = models.CharField(max_length=4, unique=True)
  659. display_name = models.CharField(max_length=20, null=True)
  660. name = models.CharField(max_length=40, null=True)
  661. is_seller = models.NullBooleanField()
  662.  
  663. @classmethod
  664. def get_or_create(C, value, **context):
  665. identifier = utils.strip_or_none(value)
  666.  
  667. if identifier == None:
  668. return None
  669.  
  670. return C.objects.get_or_create(identifier=identifier)[0]
  671.  
  672. def __unicode__(self):
  673. return u"%s - %s" % (self.identifier, self.name)
  674.  
  675.  
  676. class Contractor(models.Model):
  677. class Meta:
  678. unique_together = ('subsidiary', 'identifier')
  679.  
  680. COUNTER_FIELD = 'identifier'
  681. MATCH_FIELDS = ['name']
  682. DATE_FIELD = None
  683. SUBSIDIARY_FIELD = 'subsidiary'
  684. LINEAR_INDEX = False
  685. MAIN_SUBSIDIARY_ONLY = False
  686.  
  687. PBSID = 'identifier'
  688. PBSID_FORMAT = u'{0: <10}'
  689.  
  690. PBS_TABLE = 'pufirma'
  691. PBS_FIELDS = {
  692. 'subsidiary': (None, None),
  693. 'identifier': ('d2001', utils.strip_or_none, False),
  694. 'name': ('d2021', utils.strip_or_none, False),
  695. }
  696.  
  697. subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
  698. identifier = models.CharField(max_length=80)
  699. name = models.CharField(max_length=100, null=True)
  700.  
  701. @classmethod
  702. def get_or_create(C, value, **context):
  703. identifier = utils.strip_or_none(value)
  704.  
  705. if identifier == None:
  706. return None
  707.  
  708. return C.objects.get_or_create(identifier=identifier, subsidiary=context['subsidiary'])[0]
  709.  
  710. def __unicode__(self):
  711. return self.identifier
  712.  
  713.  
  714. class Order(models.Model):
  715. class Meta:
  716. unique_together = ('subsidiary', 'prefix', 'serial')
  717.  
  718. subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
  719. prefix = models.CharField(max_length=2, null=True)
  720. serial = models.PositiveIntegerField()
  721.  
  722. def __unicode__(self):
  723. return (self.prefix or u'') + unicode(self.serial)
  724.  
  725. @classmethod
  726. def get_or_create(C, value, **context):
  727. value = utils.strip_or_none(value)
  728.  
  729. if value == None:
  730. return None
  731.  
  732. try:
  733. serial = re.search('(\d+)$', value).group(1)
  734. except AttributeError:
  735. logger.warning("the value %s could not be parsed to a valid identifier, will not add this order." % value)
  736. return None
  737.  
  738. prefix = value.replace(serial, '')
  739.  
  740. if len(prefix) == 0:
  741. prefix = None
  742.  
  743. C.objects.get_or_create(prefix=prefix, serial=int(serial), subsidiary=context['subsidiary'])[0]
  744.  
  745.  
  746. class Receipt(models.Model):
  747. class Meta:
  748. unique_together = ('subsidiary', 'serial')
  749.  
  750. DATE_FIELD = 'date'
  751. SUBSIDIARY_FIELD = 'subsidiary'
  752. LINEAR_INDEX = True
  753. MAIN_SUBSIDIARY_ONLY = False
  754.  
  755. COUNTER_FIELD = 'kind'
  756. MATCH_FIELDS = ['price', 'price_inc_vat', 'cost', 'rounding', 'stock']
  757.  
  758. PBSID = 'serial'
  759. PBSID_FORMAT = u'{0: >18}'
  760.  
  761. PBS_TABLE = 'pukvitto'
  762. """
  763. PBS_FIELDS are descriped by a tuple containing:
  764. 0: field name
  765. 1: field name PBS
  766. 2: cast function
  767. 3: is scalar True/False
  768. """
  769. PBS_FIELDS = {
  770. 'subsidiary': (None, None, False),
  771. 'serial': ('d25762', utils.int_or_none, False),
  772. 'stock': ('d25790', Contractor.get_or_create, False),
  773. 'sign': ('d25769', Staff.get_or_create, False),
  774. 'date': ('d25764', utils.str_to_date, False),
  775. 'time': ('d25770', utils.str_to_time, False),
  776. 'kind': ('d25765', utils.strip_or_none, False),
  777. 'order': ('d25772', Order.get_or_create, False),
  778. 'contractor': ('d25761', Contractor.get_or_create, False),
  779. 'price': ('d25773', utils.float_or_none, True),
  780. 'price_inc_vat': ('d25774', utils.float_or_none, True),
  781. 'cost': ('d25775', utils.float_or_none, True),
  782. 'rounding': ('d25776', utils.float_or_none, True),
  783. }
  784.  
  785. # Identifiers
  786. subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
  787. serial = models.PositiveIntegerField()
  788.  
  789. # Optional
  790. stock = models.ForeignKey(Contractor, null=True, related_name='stock_receipt', on_delete=models.PROTECT)
  791. sign = models.ForeignKey(Staff, null=True)
  792. date = models.DateField(null=True)
  793. time = models.TimeField(null=True)
  794. kind = models.CharField(max_length=1, null=True)
  795. order = models.ForeignKey(Order, null=True, on_delete=models.PROTECT)
  796. contractor = models.ForeignKey(Contractor, null=True, related_name='contractor_receipt', on_delete=models.PROTECT)
  797. price = models.FloatField(null=True)
  798. price_inc_vat = models.FloatField(null=True)
  799. cost = models.FloatField(null=True)
  800. rounding = models.FloatField(null=True)
  801.  
  802. @classmethod
  803. def get_or_create(C, value, **context):
  804. serial = utils.int_or_none(value)
  805.  
  806. if serial == None:
  807. return None
  808.  
  809. return C.objects.get_or_create(serial=serial, subsidiary=context['subsidiary'])[0]
  810.  
  811. def __unicode__(self):
  812. return "%s" % self.serial
  813.  
  814.  
  815. class Invoice(models.Model):
  816. class Meta:
  817. unique_together = ('subsidiary', 'serial')
  818.  
  819. INVOICE = 'F'
  820. CREDIT = 'K'
  821.  
  822. KINDS = (
  823. ('F', 'Invoice'),
  824. ('K', 'Credit'),
  825. )
  826.  
  827. DATE_FIELD = 'date'
  828. SUBSIDIARY_FIELD = 'subsidiary'
  829. LINEAR_INDEX = True
  830. MAIN_SUBSIDIARY_ONLY = False
  831.  
  832. COUNTER_FIELD = 'kind'
  833. MATCH_FIELDS = ['price', 'price_inc_vat', 'stock']
  834.  
  835. PBSID = 'serial'
  836. PBSID_FORMAT = u'{0: >18}'
  837. PBS_TABLE = 'puarkhuv'
  838. PBS_FIELDS = {
  839. 'subsidiary': (None, None),
  840. 'serial': ('d11402', utils.int_or_none, False),
  841. 'stock': ('d11439', Contractor.get_or_create, False),
  842. 'date': ('d11411', utils.str_to_date, False),
  843. 'kind': ('d11401', utils.strip_or_none, False),
  844. 'order': ('d11403', Order.get_or_create, False),
  845. 'contractor': ('d11405', Contractor.get_or_create, False),
  846. 'price': ('d11431', utils.float_or_none, True),
  847. 'price_inc_vat': ('d11433', utils.float_or_none, True),
  848. }
  849.  
  850. # Identifiers
  851. subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
  852. serial = models.PositiveIntegerField()
  853.  
  854. # Optional
  855. stock = models.ForeignKey(Contractor, null=True, related_name='stock_invoice', on_delete=models.PROTECT)
  856. date = models.DateField(null=True)
  857. kind = models.CharField(max_length=1, choices=KINDS, null=True)
  858. order = models.ForeignKey(Order,null=True, on_delete=models.PROTECT)
  859. contractor = models.ForeignKey(Contractor, null=True, related_name='contractor_invoice', on_delete=models.PROTECT)
  860. price = models.FloatField(null=True)
  861. price_inc_vat = models.FloatField(null=True)
  862.  
  863. @classmethod
  864. def get_or_create(C, value, **context):
  865. value = utils.strip_or_none(value)
  866.  
  867. if value == None:
  868. return None
  869.  
  870. serial = re.search('(\d+)$', value).group(1)
  871. kind = value.replace(serial, '')
  872.  
  873. serial = utils.int_or_none(serial)
  874.  
  875. return C.objects.get_or_create(kind=kind, serial=serial, subsidiary=context['subsidiary'])[0]
  876.  
  877.  
  878. class Transaction(models.Model):
  879.  
  880. DATE_FIELD = 'date'
  881. SUBSIDIARY_FIELD = 'subsidiary'
  882. LINEAR_INDEX = True
  883. MAIN_SUBSIDIARY_ONLY = False
  884.  
  885. COUNTER_FIELD = 'origin'
  886. MATCH_FIELDS = ['price', 'price_inc_vat', 'cost', 'quantity', 'stock', 'receipt']
  887.  
  888. PBSID = 'pbs_identifier'
  889. PBSID_FORMAT = u'{0: >25}'
  890.  
  891. PBS_TABLE = 'puprotra'
  892.  
  893. """
  894. A map for linking intm fields to pbs fields.
  895.  
  896. Dictionary with intm field as keys and a tuple as value containing:
  897. (pbs field key, cast function, treat as scalar)
  898. """
  899. PBS_FIELDS = {
  900. 'id': (None, None),
  901. 'subsidiary': (None, None, False),
  902. 'pbs_identifier': ('d3602+d3603+d3604+d3607', utils.slip, False),
  903. 'origin': ('d3601', utils.strip_or_none, False),
  904. 'order': ('d3602', Order.get_or_create, False),
  905. 'row': ('d3603', utils.int_or_none, False),
  906. 'revision': ('d3604', utils.int_or_none, False),
  907. 'serial': ('d3607', utils.int_or_none, False),
  908. 'stock': ('d3682', Contractor.get_or_create, False),
  909. 'contractor': ('d3615', Contractor.get_or_create, False),
  910. 'sign': ('d3614', Staff.get_or_create, False),
  911. 'date': ('d3611', utils.str_to_date, False),
  912. 'time': ('d3613', utils.str_to_time, False),
  913. 'receipt': ('d3688', Receipt.get_or_create, False),
  914. 'invoice': ('d3687', Invoice.get_or_create, False),
  915. 'price': ('d3653', utils.float_or_none, True),
  916. 'cost': ('d3651', utils.float_or_none, True),
  917. 'price_inc_vat': ('d3666', utils.float_or_none, True),
  918. 'quantity': ('d3631', utils.float_or_none, True),
  919. 'item': ('d3605', Item.get_or_create, False),
  920. }
  921.  
  922. # PBS Mapping
  923. pbs_identifier = models.CharField(max_length=25)
  924.  
  925. # Identifiers
  926. subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
  927. order = models.ForeignKey(Order, null=True, on_delete=models.PROTECT)
  928. row = models.PositiveIntegerField(null=True)
  929. revision = models.PositiveIntegerField(null=True)
  930. serial = models.PositiveIntegerField(null=True)
  931.  
  932. # Required
  933. origin = models.CharField(max_length=1)
  934. stock = models.ForeignKey(Contractor, related_name='stock_transaction', null=True, on_delete=models.PROTECT)
  935. sign = models.ForeignKey(Staff, on_delete=models.PROTECT)
  936. date = models.DateField()
  937. time = models.TimeField()
  938.  
  939. # Optional
  940. receipt = models.ForeignKey(Receipt, null=True, on_delete=models.PROTECT)
  941. contractor = models.ForeignKey(Contractor, null=True, related_name='contractor_transaction', on_delete=models.PROTECT)
  942. invoice = models.ForeignKey(Invoice, null=True, on_delete=models.PROTECT)
  943. price = models.FloatField(null=True)
  944. price_inc_vat = models.FloatField(null=True)
  945. cost = models.FloatField(null=True)
  946. quantity = models.FloatField(null=True)
  947. item = models.ForeignKey(Item, null=True, on_delete=models.PROTECT)
  948.  
  949. def __unicode__(self):
  950. return "%s %s %s %s" % (self.date, self.time, self.stock, self.item)
  951.  
  952.  
  953. class Updated(models.Model):
  954. date = models.DateField(unique=True)
  955. timestamp = models.DateTimeField(auto_now=True)
# --- Paste-site sandbox footer (preserved for reference; not module code) ---
# Runtime error #stdin #stdout #stderr 0.01s 9072KB
# stdin:  empty
# stdout: empty
# stderr:
#   Traceback (most recent call last):
#     File "prog.py", line 9, in <module>
#   ImportError: No module named django.db
# (The sandbox lacked Django; the error is environmental, not a code bug.)