"""
Represents a PBS database layout and provides algorithms to sync PBS data
with intm model.
 
API: Sync datewise by passing a date to sync function.
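
Example (a minimal sketch; assumes Subsidiary rows and their DSNs exist):

    import datetime
    sync(datetime.date(2015, 3, 2))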
 
"""
from __future__ import absolute_import
from django.db import models
from django.db.utils import IntegrityError, DataError
from . import utils, pervasive
import re
import datetime
import sys
import logging
import threading
 
logger = logging.getLogger(__name__)
 
 
class SyncError(Exception):
    """
    General class for all exceptions in this module
    """
    def __init__(self, message):
        self.message = message
 
    def __str__(self):
        return repr(self.message)
 
 
def sync(date):
    for s in Subsidiary.objects.all():
        for m in [Transaction, Receipt, Invoice, Staff, Contractor]:
            Sync(m, s, date).sync()
 
    # Mark the date as fully synced; today's data may still change, so it
    # is never marked. get_or_create already persists the row.
    if date != datetime.date.today():
        Updated.objects.get_or_create(date=date)
 
 
class Sync(object):
 
    def __init__(self, model, subsidiary, date):
 
        self.pbs = PBS(
                pk = model.PBSID,
                pk_format = model.PBSID_FORMAT,
                fieldmap = model.PBS_FIELDS,
                table_name = model.PBS_TABLE,
                dsn = (subsidiary.primary_dsn, subsidiary.replica_dsn),
                date_filter = {model.DATE_FIELD: date}
                )
 
        self.date = date
        self.model = model
        self.subsidiary = subsidiary
 
        self._intm = model.objects.order_by(model.PBSID)
 
        if self.model.SUBSIDIARY_FIELD:
            self._intm = self._intm.filter(**{self.model.SUBSIDIARY_FIELD:self.subsidiary})
 
        self.intm = self._intm
 
        self.set_date(self.date)
 
 
    def sync(self):
        logger.info("sync: %s, %s, %s"
                % (self.model.__name__, self.subsidiary, self.date))
 
        if not self.subsidiary.main and self.model.MAIN_SUBSIDIARY_ONLY:
            logger.info("skipping: %s only syncs with main subsidiary" % self.model.__name__)
            return
 
        self.clean()
        self.pair()
        self.fill(1)
 
 
    def set_date(self, date):
        if self.model.DATE_FIELD:
            self.pbs.set_query({self.model.DATE_FIELD:date})
            self.intm = self._intm.filter(**{self.model.DATE_FIELD:date})
 
 
    def clean(self):
        """
        Filling in dates on entries the has None on expected date fields.
        """
        if not self.model.DATE_FIELD:
            return
 
        self.set_date(None)
 
        logger.info("found %s undated rows on %s/%s" % (len(self.intm), self.subsidiary, self.model.__name__))
 
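        # Each update pulls dates from PBS, so matched rows drop out of the
        # undated queryset and the loop shrinks toward empty.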
        while len(self.intm):
            pbsid_range = next(self.chunkwise(self.intm, self.pbs.query_length(), True))
 
            self.update(pbsid_range, by_queryset=True)
 
        self.set_date(self.date)
 
 
    def pair(self):
        """
        Pulls entries from PBS and adds them to intm and, if necessary,
        deletes entries from intm that don't exist in PBS.
        """
 
        if not self.model.LINEAR_INDEX:
            return 0
 
        logger.info("running add on %s/%s" % (self.subsidiary, self.model.__name__))
 
        found_rows = 1
        s = 0
 
        while found_rows:
 
            pbsid_range, overflow = self.first_mismatch(threshold=self.pbs.query_length())
 
            if overflow:
                intm = self.intm_from_range(pbsid_range)
 
                logger.warning("deleting %s unmatched rows in %s (first_pbsid, last_pbsid)" % (len(intm), str(pbsid_range)))
 
                intm.delete()
 
                # restart pairing from scratch, keeping the rows added so far
                return s + self.pair()
 
            update_duplicates = pbsid_range[1] is not None
            found_rows, added_rows, last_pbsid = self.add(pbsid_range, update_duplicates=update_duplicates)
 
            s += added_rows
 
        return s
 
 
    def fill(self, fields):
        """
        Updates entries where aggregation doesn't match.
        """
        for field in self.model.MATCH_FIELDS[0:fields]:
 
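            # scalar fields are compared by sum, non-scalar fields by count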
            count = not self.model.PBS_FIELDS[field][2]
 
            if self.model.LINEAR_INDEX:
                intm_value, pbs_value = self.diff(
                        field = field,
                        pbsid_range = (None, None),
                        by_queryset = False,
                        count = count,
                        exclude_empty = True,
                        )
 
                if abs(intm_value - pbs_value) < 0.01:
                    continue
 
            for pbsid_range in self.chunkwise(intm=self.intm, step=self.pbs.query_length(), closed=True):
                intm_value, pbs_value = self.diff(
                        field = field,
                        pbsid_range = pbsid_range,
                        by_queryset = True,
                        count = count,
                        exclude_empty = True,
                        )
 
                if abs(intm_value - pbs_value) < 0.01 or (count and intm_value > pbs_value):
                    continue
 
                self.update(pbsid_range, by_queryset=True)
 
 
    def first_mismatch(self, threshold):
        """
        Finds the first mismatch between PBS and intm and returns its
        position and mismatch type as ((first_pbsid, last_pbsid), overflow).

        first/last pbsid give the range where the mismatch is located, and
        overflow is True if intm holds more elements than PBS.
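
        For example, if intm still holds rows that PBS no longer has, the
        bisection narrows to the smallest range still showing a count
        difference and overflow comes back True, which tells pair() to
        delete that range from intm.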
        """
 
        min_index = 0
        max_index = len(self.intm) - 1
 
        if max_index < 0:
            return ((None, None), False)
 
        # set arguments for diff to strictly count elements
        diff_args = {
            'field': self.model.COUNTER_FIELD,
            'by_queryset': False,
            'count': True,
            'exclude_empty': False,
            }
 
        # Upper edge case: Entries are synced up to last intm entry
        pbsid_range = (None, self.pbs.pbsid_from_intm(self.intm[max_index]))
        intm_count, pbs_count = self.diff(pbsid_range=pbsid_range, **diff_args)
 
        if abs(intm_count - pbs_count) < 0.01:
            return ((pbsid_range[1], None), False)
 
        # Lower edge case: Entries are unsynced before first intm entry
        pbsid_range = (None, self.pbs.pbsid_from_intm(self.intm[min_index]))
        intm_count, pbs_count = self.diff(pbsid_range=pbsid_range, **diff_args)
 
        if abs(intm_count - pbs_count) >= 0.01:
            return ((None, pbsid_range[1]), intm_count > pbs_count)
 
        while True:
            mid_index = min_index + (max_index - min_index) // 2
 
            pbsid_range = (None, self.pbs.pbsid_from_intm(self.intm[mid_index]))
            intm_count, pbs_count = self.diff(pbsid_range=pbsid_range, **diff_args)
 
            diff = abs(intm_count - pbs_count) >= 0.01
 
            min_index, max_index = (min_index, mid_index) if diff else (mid_index, max_index)
 
            if diff and (max_index - min_index <= threshold):
                return (
                        (self.pbs.pbsid_from_intm(self.intm[min_index]), self.pbs.pbsid_from_intm(self.intm[max_index])),
                        intm_count > pbs_count,
                        )
 
 
    def add(self, pbsid_range, update_duplicates):
        """
        Adds a result set to intm and returns number of inserted items.
 
        Utilizes django's bulk_create method which will raise IntegrityError
        if unique fields are violated.
 
        If the number of created rows matches CHUNK_SIZE, add assumes the
        next chunk could be full as well, and a prefetching thread may be
        spawned on that assumption (see PBS.cache_fetch).
        """
 
        logger.debug("fetching %s (first_pbsid, last_pbsid)" % str(pbsid_range))
 
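        # fetch_by_range serves a cached prefetch of this range when one is
        # available (see PBS.get_cached_fetch)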
        result = self.pbs.fetch_by_range(pbsid_range, include_endpoints=False)
 
        logger.debug("adding %s rows, %s (first_pbsid, last_pbsid), Filter duplicates:%s"
                % (len(result), str(pbsid_range), update_duplicates))
 
        bulk_create = []
 
        r = []
        count = 0
 
        for r in result:
            fields = self.cast_row(r)
 
            if update_duplicates:
                count = self.intm_from_range(
                        pbsid_range=(fields[self.model.PBSID], fields[self.model.PBSID]),
                        intm=self.model.objects.filter(subsidiary=self.subsidiary)
                        ).update(**fields)
 
            if count == 0:
                bulk_create.append(self.model(subsidiary=self.subsidiary, **fields))
 
 
        self.model.objects.bulk_create(bulk_create)
 
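        # update() with no fields is a no-op in SQL but clears the
        # queryset's cached results, forcing re-evaluation on next access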
        self.intm.update()
 
        logger.debug("added %s/%s (added/result) rows, %s (first_pbsid, last_pbsid)"
                % (len(bulk_create), len(result), str(pbsid_range)))
 
        return len(result), len(bulk_create), r[self.model.PBSID] if r else None
 
 
    def update(self, pbsid_range, by_queryset):
        """
        Updates the fields in a queryset by requesting the identifiers in
        intm from PBS and returns the number of rows matched (not changed).
 
        """
        logger.debug("fetching updates %s (first_pbsid, last_pbsid)" % str(pbsid_range))
        intm = self.intm_from_range(pbsid_range, self.intm)
 
        if by_queryset:
            result = self.pbs.fetch_by_queryset(intm)
        else:
            result = self.pbs.fetch_by_range(pbsid_range, include_endpoints=True)
 
        s = 0
        for r in result:
            fields = self.cast_row(r)
            count = self.intm_from_range((fields[self.model.PBSID], fields[self.model.PBSID]), self.intm).update(**fields)
 
            if count > 1:
                logger.warning("updated %s rows on id %s" % (count, fields[self.model.PBSID]))
            if count < 1:
                logger.warning("no rows matched id %s" % fields[self.model.PBSID])
 
            s += count
 
        self.intm.update()
 
        logger.debug("updated %s/%s (updated/data) rows, %s (first_pbsid, last_pbsid)"
            % (s, len(result), str(pbsid_range)))
 
        return s
 
 
    def diff(self, field, pbsid_range, by_queryset, count, exclude_empty):
        """
        Returns (intm_value, pbs_value) for the same aggregate (a count or
        a sum of field) computed over pbsid_range on both sides.
        """
        intm = self.intm_from_range(pbsid_range, self.intm)
 
        if by_queryset:
            pbs_value = self.pbs.aggregate_by_queryset(field, intm, count, exclude_empty)
        else:
            pbs_value = self.pbs.aggregate_by_range(field, pbsid_range, count, exclude_empty)
 
        if count:
            if exclude_empty:
                intm_value = len(intm.exclude(**{field: None}))
            else:
                intm_value = len(intm)
        else:
            intm_value = intm.aggregate(models.Sum(field))[field + '__sum']
 
        intm_value = intm_value or 0
        pbs_value = pbs_value or 0
        diff = intm_value - pbs_value
 
        logger.debug("diff: %s-%s=%s (intm - pbs = diff)" % (intm_value, pbs_value, diff))
 
        return intm_value, pbs_value
 
 
    def chunkwise(self, intm, step, closed):
        """
        Yields (first_pbsid, last_pbsid) ranges covering intm in chunks of
        step rows. Closed ranges include both endpoints; open ranges use
        None to mark the edges beyond the first and last row.
        """
        min_index = 0
        max_index = len(intm) - 1

        if not closed:
            min_index -= 1
            max_index += 1

        if max_index < 0:
            return
 
        """
        If step evaluates to false, the entire queryset should be returned.
        Setting step to max_index will render a full closed interval
        while adding 2 will render a full open interval (None, None)
        """
        step = step or (max_index + (0 if closed else 2))
 
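        # e.g. a closed pass over 5 rows with step 2 yields
        # (pbsid[0], pbsid[2]) and then (pbsid[3], pbsid[4])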
        i = min_index
 
        while i <= max_index:
            j = min(i + step, max_index)
 
            first_pbsid = None
            if i != -1:
                first_pbsid = self.pbs.pbsid_from_intm(intm[i])
 
            last_pbsid = None
            if j != len(intm):
                last_pbsid = self.pbs.pbsid_from_intm(intm[j])
 
            yield (first_pbsid, last_pbsid)
 
            i += step + (1 if closed else 0)
 
 
    def intm_from_range(self, pbsid_range, intm):
        f = {}
 
        if pbsid_range[0] is not None:
            f[self.model.PBSID + '__gte'] = pbsid_range[0]

        if pbsid_range[1] is not None:
            f[self.model.PBSID + '__lte'] = pbsid_range[1]
 
        return intm.filter(**f)
 
 
    def cast(self, f, v):
        try:
            c = self.model.PBS_FIELDS[f][1](v, subsidiary=self.subsidiary)
        except DataError as e:
            logger.error("bad identifier %s for field %s/%s (cast: %s)" %
                    (v, f, self.model.PBS_FIELDS[f][0], self.model.PBS_FIELDS[f][1].__name__))
            raise e
 
        return c
 
 
    def cast_row(self, r):
        return {c:self.cast(c, r[c]) for c in r}
 
 
class PBS(object):
 
    CHUNK_SIZE = 1000
 
    def __init__(self, pk, pk_format, fieldmap, table_name, dsn, date_filter):
        self.fieldmap = fieldmap
        self.table_name = table_name
        self.pk = pk
        self.pk_format = pk_format
 
        self.__dsn = None
        self.__query = None
        self.__query_dateless = None
 
        self.__cache = {}
        self.__thread_pool = {}
 
        self.set_dsn(dsn, date_filter)
        self.set_query(date_filter)
 
 
    def fetch_by_queryset(self, intm):
        """
        Fetch from PBS based on intm-queryset by adding "in (id_1, id_2, ...)" to the query.
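
        For the Receipt model, for example, the statement takes roughly
        this shape (<fields>, <date> and <id> are placeholders):

            select <fields> from pukvitto where d25764='<date>'
                and d25762 in ('<id>','<id>',...) order by d25762 asc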
        """
        q = self.__query % self.query_fields()
        q += " and %s" % (self.query_filter(intm))
        q += " order by %s asc" % self.pk
 
        result = pervasive.query(self.__dsn, q)
 
        logger.debug("fetched %s/%s rows" % (len(result), len(intm)))
 
        return result
 
 
    def fetch_by_range(self, pbsid_range, include_endpoints):
        """
        Returns all rows in the PBS table within pbsid_range, serving a
        cached prefetch result when one matches (see cache_fetch).
        """
 
        cached_result = self.get_cached_fetch(pbsid_range)
 
        if cached_result:
            return cached_result
 
        q = self.__query % "top %i %s" % (PBS.CHUNK_SIZE, self.query_fields())
 
        if pbsid_range[0] is not None:
            q += " and %s >%s '%s'" % (self.pk, ('=' if include_endpoints else ''), pbsid_range[0])

        if pbsid_range[1] is not None:
            q += " and %s <%s '%s'" % (self.pk, ('=' if include_endpoints else ''), pbsid_range[1])
 
        q += " order by %s asc" % self.pk
 
        result = pervasive.query(self.__dsn, q)
 
        logger.debug("fetched %s rows on %s (first_pbsid, last_pbsid)" % (len(result), str(pbsid_range)))
 
        return result
 
 
    def aggregate_by_queryset(self, field, intm, count, exclude_empty):
        if count:
            aggregate = 'count'
            cast = int
        else:
            aggregate = 'sum'
            cast = self.fieldmap[field][1]
 
        q = self.__query % ("%s(%s) as c" % (aggregate, self.fieldmap[field][0]))
 
        q += " and %s" % self.query_filter(intm)
 
        if exclude_empty:
            q += " and %s <> ''" % self.fieldmap[field][0]
 
        try:
            s = cast(pervasive.query(self.__dsn, q)[0]['c'])
        except IndexError:
            s = 0
 
        logger.debug("aggregate (%s) field %s = %s (len(intm)=%s)"
                % (aggregate, field, s, len(intm)))
 
        return s
 
 
    def aggregate_by_range(self, field, pbsid_range, count, exclude_empty):
 
        if count:
            aggregate = 'count'
            cast = int
        else:
            aggregate = 'sum'
            cast = self.fieldmap[field][1]
 
        q = self.__query % ("%s(%s) as c" % (aggregate, self.fieldmap[field][0]))
 
        if pbsid_range[0] is not None:
            q += " and %s >= '%s'" % (self.fieldmap[self.pk][0], pbsid_range[0])

        if pbsid_range[1] is not None:
            q += " and %s <= '%s'" % (self.fieldmap[self.pk][0], pbsid_range[1])
 
        if exclude_empty:
            q += " and %s <> ''" % self.fieldmap[field][0]
 
        try:
            s = cast(pervasive.query(self.__dsn, q)[0]['c'])
        except IndexError:
            s = 0
 
        logger.debug("aggregate (%s) field %s = %s, %s (first_pbsid, last_pbsid)" % (aggregate, field, s, str(pbsid_range)))
 
        return s
 
 
    def cache_fetch(self, pbsid_range):
        logger.debug("caching rows on %s (first_pbsid, last_pbsid)" % str(pbsid_range))
 
        result = self.fetch_by_range(pbsid_range, include_endpoints=False)
 
        self.__cache['fetch'] = {
                'pbsid_range': pbsid_range,
                'result': result,
                }
 
        logger.debug("cached %s rows on %s (first_pbsid, last_pbsid)" % (len(result), str(pbsid_range)))
 
 
    def get_cached_fetch(self, pbsid_range):
        result = []
 
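        # If a prefetch thread is still running for this range, wait for it
        # to finish populating the cache (never join the current thread).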
        if 'fetch' in self.__thread_pool and self.__thread_pool['fetch'].is_alive():
            logger.debug("waiting for thread %s" % self.__thread_pool['fetch'])
            if self.__thread_pool['fetch'] != threading.current_thread():
                self.__thread_pool['fetch'].join()
                logger.debug("joined with thread %s" % self.__thread_pool['fetch'])
            else:
                logger.debug("skipped join with thread %s" % self.__thread_pool['fetch'])
 
        try:
            if self.__cache['fetch']['pbsid_range'] == pbsid_range:
                result = self.__cache['fetch']['result']
        except KeyError:
            pass
 
        logger.debug("got cache %s rows on %s (first_pbsid, last_pbsid)" % (len(result), str(pbsid_range)))
 
        return result
 
 
    def pbsid_from_intm(self, intm):
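        # Format the model pk into PBS's fixed-width identifier form
        # (e.g. Receipt right-aligns its serial with u'{0: >18}').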
        pbsid = self.pk_format.format(getattr(intm, self.pk))
        return pbsid
 
 
    def range_from_intm(self, intm):
        return (self.pbsid_from_intm(intm.first()), self.pbsid_from_intm(intm.last()))
 
 
    def set_query(self, date_filter):
        date_field = next(iter(date_filter))
        date = date_filter[date_field]

        query = "select %%s from %s" % self.table_name

        if date_field is not None and date is not None:
            self.__query = query + " where %s='%s'" % (
                    self.fieldmap[date_field][0],
                    utils.date_to_str(date),
                    )
        else:
            self.__query = query + " where 1=1"
 
        logger.debug("query skeleton set to '%s'" % self.__query)
 
 
    def set_dsn(self, dsn, date_filter):
        date_field = next(iter(date_filter))
        date = date_filter[date_field]

        # Prefer the replica for historical dates; today's data may only be
        # complete on the primary.
        self.__dsn = dsn[1] if dsn[1] and date != datetime.date.today() else dsn[0]
 
 
    def query_fields(self):
        return ','.join("%s as '%s'"
                % (self.fieldmap[f][0], f) for f in self.fieldmap if self.fieldmap[f][0])
 
 
    def query_filter(self, intm):
        identifiers = [self.pk_format.format(getattr(i, self.pk)) for i in intm]
 
        return "%s in ('%s')" % (
                self.fieldmap[self.pk][0],
                "','".join(identifiers)
                )
 
 
    def query_length(self):
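        # How many identifiers fit in one "in (...)" filter: keep ~800
        # characters of headroom for the rest of the statement, then divide
        # by the cost per identifier (its formatted width plus two quotes
        # and a comma).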
        return (pervasive.MAX_QUERY_LENGTH - 800) // (len(self.pk_format.format(0)) + 3)
 
 
class Subsidiary(models.Model):
    name = models.CharField(max_length=40, unique=True)
    main = models.BooleanField(default=False)
    primary_dsn = models.CharField(max_length=10, unique=True)
    replica_dsn = models.CharField(max_length=10, unique=True, null=True, blank=True)
 
    def __unicode__(self):
        return self.name
 
 
class Item(models.Model):
    identifier = models.CharField(max_length=25, unique=True)
 
    @classmethod
    def get_or_create(C, value, **context):
        identifier = utils.strip_or_none(value)
 
        if identifier is None:
            return None
 
        return C.objects.get_or_create(identifier=identifier)[0]
 
    def __unicode__(self):
        return self.identifier
 
 
class Staff(models.Model):
    class Meta:
        ordering = ['name', 'identifier']
 
    COUNTER_FIELD = 'identifier'
    MATCH_FIELDS = ['name', 'display_name']
    DATE_FIELD = None
    SUBSIDIARY_FIELD = None
    LINEAR_INDEX = False
    MAIN_SUBSIDIARY_ONLY = True
 
    PBSID = 'identifier'
    PBSID_FORMAT = u'{0: <4}'
 
    PBS_TABLE = 'pupers'
    PBS_FIELDS = {
            'identifier': ('d2601', utils.strip_or_none, False),
            'name': ('d2621', utils.strip_or_none, False),
            'display_name': ('d2681', utils.strip_or_none, False),
            'is_seller': ('d2701', utils.eight_bit_to_bool, False),
            }
 
    identifier = models.CharField(max_length=4, unique=True)
    display_name = models.CharField(max_length=20, null=True)
    name = models.CharField(max_length=40, null=True)
    is_seller = models.NullBooleanField()
 
    @classmethod
    def get_or_create(C, value, **context):
        identifier = utils.strip_or_none(value)
 
        if identifier is None:
            return None
 
        return C.objects.get_or_create(identifier=identifier)[0]
 
    def __unicode__(self):
        return u"%s - %s" % (self.identifier, self.name)
 
 
class Contractor(models.Model):
    class Meta:
        unique_together = ('subsidiary', 'identifier')
 
    COUNTER_FIELD = 'identifier'
    MATCH_FIELDS = ['name']
    DATE_FIELD = None
    SUBSIDIARY_FIELD = 'subsidiary'
    LINEAR_INDEX = False
    MAIN_SUBSIDIARY_ONLY = False
 
    PBSID = 'identifier'
    PBSID_FORMAT = u'{0: <10}'
 
    PBS_TABLE = 'pufirma'
    PBS_FIELDS = {
            'subsidiary': (None, None, False),
            'identifier': ('d2001', utils.strip_or_none, False),
            'name': ('d2021', utils.strip_or_none, False),
            }
 
    subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
    identifier = models.CharField(max_length=80)
    name = models.CharField(max_length=100, null=True)
 
    @classmethod
    def get_or_create(C, value, **context):
        identifier = utils.strip_or_none(value)
 
        if identifier is None:
            return None
 
        return C.objects.get_or_create(identifier=identifier, subsidiary=context['subsidiary'])[0]
 
    def __unicode__(self):
        return self.identifier
 
 
class Order(models.Model):
    class Meta:
        unique_together = ('subsidiary', 'prefix', 'serial')
 
    subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
    prefix = models.CharField(max_length=2, null=True)
    serial = models.PositiveIntegerField()
 
    def __unicode__(self):
        return (self.prefix or u'') + unicode(self.serial)
 
    @classmethod
    def get_or_create(C, value, **context):
        value = utils.strip_or_none(value)
 
        if value is None:
            return None

        try:
            serial = re.search(r'(\d+)$', value).group(1)
        except AttributeError:
            logger.warning("the value %s could not be parsed to a valid identifier, will not add this order." % value)
            return None
 
        prefix = value.replace(serial, '')
 
        if len(prefix) == 0:
            prefix = None
 
        return C.objects.get_or_create(prefix=prefix, serial=int(serial), subsidiary=context['subsidiary'])[0]
 
 
class Receipt(models.Model):
    class Meta:
        unique_together = ('subsidiary', 'serial')
 
    DATE_FIELD = 'date'
    SUBSIDIARY_FIELD = 'subsidiary'
    LINEAR_INDEX = True
    MAIN_SUBSIDIARY_ONLY = False
 
    COUNTER_FIELD = 'kind'
    MATCH_FIELDS = ['price', 'price_inc_vat', 'cost', 'rounding', 'stock']
 
    PBSID = 'serial'
    PBSID_FORMAT = u'{0: >18}'
 
    PBS_TABLE = 'pukvitto'
    """
    PBS_FIELDS are descriped by a tuple containing:
    0: field name
    1: field name PBS
    2: cast function
    3: is scalar True/False
    """
    PBS_FIELDS = {
            'subsidiary': (None, None, False),
            'serial': ('d25762', utils.int_or_none, False),
            'stock': ('d25790', Contractor.get_or_create, False),
            'sign': ('d25769', Staff.get_or_create, False),
            'date': ('d25764', utils.str_to_date, False),
            'time': ('d25770', utils.str_to_time, False),
            'kind': ('d25765', utils.strip_or_none, False),
            'order': ('d25772', Order.get_or_create, False),
            'contractor': ('d25761', Contractor.get_or_create, False),
            'price': ('d25773', utils.float_or_none, True),
            'price_inc_vat': ('d25774', utils.float_or_none, True),
            'cost': ('d25775', utils.float_or_none, True),
            'rounding': ('d25776', utils.float_or_none, True),
            }
 
    # Identifiers
    subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
    serial = models.PositiveIntegerField()
 
    # Optional
    stock = models.ForeignKey(Contractor, null=True, related_name='stock_receipt', on_delete=models.PROTECT)
    sign = models.ForeignKey(Staff, null=True, on_delete=models.PROTECT)
    date = models.DateField(null=True)
    time = models.TimeField(null=True)
    kind = models.CharField(max_length=1, null=True)
    order = models.ForeignKey(Order, null=True, on_delete=models.PROTECT)
    contractor = models.ForeignKey(Contractor, null=True, related_name='contractor_receipt', on_delete=models.PROTECT)
    price = models.FloatField(null=True)
    price_inc_vat = models.FloatField(null=True)
    cost = models.FloatField(null=True)
    rounding = models.FloatField(null=True)
 
    @classmethod
    def get_or_create(C, value, **context):
        serial = utils.int_or_none(value)
 
        if serial is None:
            return None
 
        return C.objects.get_or_create(serial=serial, subsidiary=context['subsidiary'])[0]
 
    def __unicode__(self):
        return "%s" % self.serial
 
 
class Invoice(models.Model):
    class Meta:
        unique_together = ('subsidiary', 'serial')
 
    INVOICE = 'F'
    CREDIT = 'K'
 
    KINDS = (
            ('F', 'Invoice'),
            ('K', 'Credit'),
            )
 
    DATE_FIELD = 'date'
    SUBSIDIARY_FIELD = 'subsidiary'
    LINEAR_INDEX = True
    MAIN_SUBSIDIARY_ONLY = False
 
    COUNTER_FIELD = 'kind'
    MATCH_FIELDS = ['price', 'price_inc_vat', 'stock']
 
    PBSID = 'serial'
    PBSID_FORMAT = u'{0: >18}'
    PBS_TABLE = 'puarkhuv'
    PBS_FIELDS = {
            'subsidiary': (None, None, False),
            'serial': ('d11402', utils.int_or_none, False),
            'stock': ('d11439', Contractor.get_or_create, False),
            'date': ('d11411', utils.str_to_date, False),
            'kind': ('d11401', utils.strip_or_none, False),
            'order': ('d11403', Order.get_or_create, False),
            'contractor': ('d11405', Contractor.get_or_create, False),
            'price': ('d11431', utils.float_or_none, True),
            'price_inc_vat': ('d11433', utils.float_or_none, True),
            }
 
    # Identifiers
    subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
    serial = models.PositiveIntegerField()
 
    # Optional
    stock = models.ForeignKey(Contractor, null=True, related_name='stock_invoice', on_delete=models.PROTECT)
    date = models.DateField(null=True)
    kind = models.CharField(max_length=1, choices=KINDS, null=True)
    order = models.ForeignKey(Order,null=True, on_delete=models.PROTECT)
    contractor = models.ForeignKey(Contractor, null=True, related_name='contractor_invoice', on_delete=models.PROTECT)
    price = models.FloatField(null=True)
    price_inc_vat = models.FloatField(null=True)
 
    @classmethod
    def get_or_create(C, value, **context):
        value = utils.strip_or_none(value)
 
        if value is None:
            return None

        serial = re.search(r'(\d+)$', value).group(1)
        kind = value.replace(serial, '')
 
        serial = utils.int_or_none(serial)
 
        return C.objects.get_or_create(kind=kind, serial=serial, subsidiary=context['subsidiary'])[0]
 
 
class Transaction(models.Model):
 
    DATE_FIELD = 'date'
    SUBSIDIARY_FIELD = 'subsidiary'
    LINEAR_INDEX = True
    MAIN_SUBSIDIARY_ONLY = False
 
    COUNTER_FIELD = 'origin'
    MATCH_FIELDS = ['price', 'price_inc_vat', 'cost', 'quantity', 'stock', 'receipt']
 
    PBSID = 'pbs_identifier'
    PBSID_FORMAT = u'{0: >25}'
 
    PBS_TABLE = 'puprotra'
 
    """
    A map for linking intm fields to pbs fields.
 
    Dictionary with intm field as keys and a tuple as value containing:
    (pbs field key, cast function, treat as scalar)
    """
    PBS_FIELDS = {
            'id': (None, None, False),
            'subsidiary': (None, None, False),
            'pbs_identifier': ('d3602+d3603+d3604+d3607', utils.slip, False),
            'origin': ('d3601', utils.strip_or_none, False),
            'order': ('d3602', Order.get_or_create, False),
            'row': ('d3603', utils.int_or_none, False),
            'revision': ('d3604', utils.int_or_none, False),
            'serial': ('d3607', utils.int_or_none, False),
            'stock': ('d3682', Contractor.get_or_create, False),
            'contractor': ('d3615', Contractor.get_or_create, False),
            'sign': ('d3614', Staff.get_or_create, False),
            'date': ('d3611', utils.str_to_date, False),
            'time': ('d3613', utils.str_to_time, False),
            'receipt': ('d3688', Receipt.get_or_create, False),
            'invoice': ('d3687', Invoice.get_or_create, False),
            'price': ('d3653', utils.float_or_none, True),
            'cost': ('d3651', utils.float_or_none, True),
            'price_inc_vat': ('d3666', utils.float_or_none, True),
            'quantity': ('d3631', utils.float_or_none, True),
            'item': ('d3605', Item.get_or_create, False),
            }
 
    # PBS Mapping
    pbs_identifier = models.CharField(max_length=25)
 
    # Identifiers
    subsidiary = models.ForeignKey(Subsidiary, on_delete=models.PROTECT)
    order = models.ForeignKey(Order, null=True, on_delete=models.PROTECT)
    row = models.PositiveIntegerField(null=True)
    revision = models.PositiveIntegerField(null=True)
    serial = models.PositiveIntegerField(null=True)
 
    # Required
    origin = models.CharField(max_length=1)
    stock = models.ForeignKey(Contractor, related_name='stock_transaction', null=True, on_delete=models.PROTECT)
    sign = models.ForeignKey(Staff, on_delete=models.PROTECT)
    date = models.DateField()
    time = models.TimeField()
 
    # Optional
    receipt = models.ForeignKey(Receipt, null=True, on_delete=models.PROTECT)
    contractor = models.ForeignKey(Contractor, null=True, related_name='contractor_transaction', on_delete=models.PROTECT)
    invoice = models.ForeignKey(Invoice, null=True, on_delete=models.PROTECT)
    price = models.FloatField(null=True)
    price_inc_vat = models.FloatField(null=True)
    cost = models.FloatField(null=True)
    quantity = models.FloatField(null=True)
    item = models.ForeignKey(Item, null=True, on_delete=models.PROTECT)
 
    def __unicode__(self):
        return "%s %s %s %s" % (self.date, self.time, self.stock, self.item)
 
 
class Updated(models.Model):
    date = models.DateField(unique=True)
    timestamp = models.DateTimeField(auto_now=True)