Skip to content

Commit

Permalink
Merge pull request #582 from happycube/chad-120520
Browse files Browse the repository at this point in the history
Phase 1 of logging
  • Loading branch information
happycube authored Dec 20, 2020
2 parents dc2f777 + 2de2eef commit ca68709
Show file tree
Hide file tree
Showing 2 changed files with 101 additions and 38 deletions.
44 changes: 38 additions & 6 deletions ld-decode
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,21 @@ from datetime import datetime
import getopt
import io
from io import BytesIO
import logging
import os
import signal
import sys
import argparse
import json
import traceback

from lddecode.core import *
from lddecode.utils import *
# Add lddecode core modules to path
module_path = os.path.abspath(os.path.join('lddecode'))
if module_path not in sys.path:
sys.path.append(module_path)

from core import *
from utils import *

options_epilog = """FREQ can be a bare number in MHz, or a number with one of the case-insensitive suffixes Hz, kHz, MHz, GHz, fSC (meaning NTSC) or fSCPAL."""
parser = argparse.ArgumentParser(description='Extracts audio and video from raw RF laserdisc captures', epilog=options_epilog)
Expand Down Expand Up @@ -82,10 +88,34 @@ except ValueError as e:

system = 'PAL' if args.pal else 'NTSC'

# Configure the 'lddecode' logger to write DEBUG-level output to a per-run
# logfile (<outname>.log) and INFO-level output to stderr.
# Modified from https://docs.python.org/3.8/howto/logging-cookbook.html#logging-cookbook

logger = logging.getLogger('lddecode')
logger.setLevel(logging.DEBUG)

# Delete old logfile if it exists, so each run starts with a fresh log.
# Narrow to OSError: a bare `except:` would also swallow KeyboardInterrupt
# and SystemExit; a missing file (FileNotFoundError) is the expected case.
try:
    os.unlink(outname + '.log')
except OSError:
    pass

# File handler captures everything (DEBUG and up) with timestamps for bug reports.
logger_file = logging.FileHandler(outname + '.log')
logger_file.setLevel(logging.DEBUG)

logger_fileformatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger_file.setFormatter(logger_fileformatter)

# Console handler is terser: INFO and up, default formatting, to stderr.
logger_stderr = logging.StreamHandler()
logger_stderr.setLevel(logging.INFO)

logger.addHandler(logger_stderr)
logger.addHandler(logger_file)

# Wrap the LDdecode creation so that the signal handler is not taken by sub-threads,
# allowing SIGINT/control-C's to be handled cleanly
original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
ldd = LDdecode(filename, outname, loader, analog_audio = 0 if args.daa else 44.100, digital_audio = not args.noefm, system=system, doDOD = not args.nodod, threads=args.threads, extra_options=extra_options)
ldd = LDdecode(filename, outname, loader, logger, est_frames=req_frames, analog_audio = 0 if args.daa else 44.100, digital_audio = not args.noefm, system=system, doDOD = not args.nodod, threads=args.threads, extra_options=extra_options)
signal.signal(signal.SIGINT, original_sigint_handler)

if args.start_fileloc != -1:
Expand Down Expand Up @@ -128,18 +158,19 @@ while not done and ldd.fields_written < (req_frames * 2):
try:
f = ldd.readfield()
except KeyboardInterrupt as kbd:
print("Terminated, saving JSON and exiting", file=sys.stderr)
print("\nTerminated, saving JSON and exiting", file=sys.stderr)
write_json(ldd, outname)
ldd.close()
exit(1)
except Exception as err:
print("ERROR - please paste the following into a bug report:", file=sys.stderr)
print("\nERROR - please paste the following into a bug report:", file=sys.stderr)
print("current sample:", ldd.fdoffset, file=sys.stderr)
print("arguments:", args, file=sys.stderr)
print("Exception:", err, " Traceback:", file=sys.stderr)
traceback.print_tb(err.__traceback__)
write_json(ldd, outname)
ldd.close()
logger_file.flush()
exit(1)

if f is None or (args.ignoreleadout == False and ldd.leadOut == True):
Expand All @@ -151,6 +182,7 @@ while not done and ldd.fields_written < (req_frames * 2):
#print('write json')
write_json(ldd, outname)

print("saving JSON and exiting", file=sys.stderr)
print("\nCompleted: saving JSON and exiting", file=sys.stderr)
write_json(ldd, outname)
logger_file.flush()
ldd.close()
95 changes: 63 additions & 32 deletions lddecode/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,9 @@
# If not running Anaconda, we don't care that mkl doesn't exist.
pass

logging.getLogger(__name__).setLevel(logging.DEBUG)
# XXX: This is a hack so that logging is treated the same way in both this
# and ld-decode. Probably should just bring all logging in here...
logger = None

def calclinelen(SP, mult, mhz):
if type(mhz) == str:
Expand Down Expand Up @@ -1050,7 +1052,7 @@ def dequeue(self):

if 'MTF' not in item or 'demod' not in item:
# This shouldn't happen, but was observed by Simon on a decode
logging.error('incomplete demodulated block placed on queue, block #%d', blocknum)
logger.error('incomplete demodulated block placed on queue, block #%d', blocknum)
self.q_in.put((blocknum, self.blocks[blocknum], self.currentMTF))
self.lock.release()
continue
Expand Down Expand Up @@ -1091,7 +1093,7 @@ def read(self, begin, length, MTF=0, dodemod=True):
return rv

while need_blocks is not None and len(need_blocks):
time.sleep(.005) # A crude busy loop
time.sleep(.001) # A crude busy loop
need_blocks = self.doread(toread, MTF)

if need_blocks is None:
Expand Down Expand Up @@ -1190,7 +1192,7 @@ def downscale_audio(audio, lineinfo, rf, linecount, timeoffset = 0, freq = 48000
else:
# TBC failure can cause this (issue #389)
if failed == False:
logging.warning("Analog audio processing error, muting samples")
logger.warning("Analog audio processing error, muting samples")

failed = True

Expand Down Expand Up @@ -1774,7 +1776,7 @@ def compute_linelocs(self):

self.rawpulses = self.getpulses()
if self.rawpulses is None or len(self.rawpulses) == 0:
logging.error("Unable to find any sync pulses, jumping one second")
logger.error("Unable to find any sync pulses, jumping one second")
return None, None, int(self.rf.freq_hz)

self.validpulses = validpulses = self.refinepulses()
Expand All @@ -1796,7 +1798,7 @@ def compute_linelocs(self):

if line0loc is None:
if self.initphase == False:
logging.error("Unable to determine start of field - dropping field")
logger.error("Unable to determine start of field - dropping field")

return None, None, self.inlinelen * 200

Expand Down Expand Up @@ -2010,7 +2012,7 @@ def downscale(self, lineinfo = None, linesout = None, outwidth = None, channel='

dsout[(l - lineoffset) * outwidth:(l + 1 - lineoffset)*outwidth] = scaled
else:
logging.warning("WARNING: TBC failure at line %d", l)
logger.warning("WARNING: TBC failure at line %d", l)
dsout[(l - lineoffset) * outwidth:(l + 1 - lineoffset)*outwidth] = self.rf.SysParams['ire0']

if audio > 0 and self.rf.decode_analog_audio:
Expand Down Expand Up @@ -2349,7 +2351,7 @@ def get_following_field_number(self):
return 1 if newphase == 9 else newphase
else:
# This can be triggered by the first pass at the first field
#logging.error("Cannot determine PAL field sequence of first field")
#logger.error("Cannot determine PAL field sequence of first field")
return 1

def determine_field_number(self):
Expand Down Expand Up @@ -2489,7 +2491,7 @@ def buildCBuffer(self, subset = None):

return cbuffer

def splitIQ(self, cbuffer, line = 0):
def splitIQ_line(self, cbuffer, line = 0):
'''
NOTE: currently? only works on one line
Expand Down Expand Up @@ -2519,8 +2521,8 @@ def calcLine19Info(self, comb_field2 = None):
# fail out if there is obviously bad data
if not ((np.max(self.field.output_to_ire(self.field.dspicture[l19_slice_i70])) < 100) and
(np.min(self.field.output_to_ire(self.field.dspicture[l19_slice_i70])) > 40)):
#logging.info("WARNING: line 19 data incorrect")
#logging.info(np.max(self.field.output_to_ire(self.field.dspicture[l19_slice_i70])), np.min(self.field.output_to_ire(self.field.dspicture[l19_slice_i70])))
#logger.info("WARNING: line 19 data incorrect")
#logger.info(np.max(self.field.output_to_ire(self.field.dspicture[l19_slice_i70])), np.min(self.field.output_to_ire(self.field.dspicture[l19_slice_i70])))
return None, None, None

cbuffer = self.cbuffer[l19_slice]
Expand All @@ -2533,7 +2535,7 @@ def calcLine19Info(self, comb_field2 = None):
cbuffer -= comb_field2.cbuffer[l19_slice]
cbuffer /= 2

si, sq = self.splitIQ(cbuffer, 19)
si, sq = self.splitIQ_line(cbuffer, 19)

sl = slice(110,230)
cdata = np.sqrt((si[sl] ** 2.0) + (sq[sl] ** 2.0))
Expand Down Expand Up @@ -2658,7 +2660,7 @@ def calc_burstmedian(self):
return np.median(burstlevel) / self.rf.SysParams['hz_ire']

def apply_offsets(self, linelocs, phaseoffset, picoffset = 0):
#logging.info(phaseoffset, (phaseoffset * (self.rf.freq / (4 * 315 / 88))))
#logger.info(phaseoffset, (phaseoffset * (self.rf.freq / (4 * 315 / 88))))
return np.array(linelocs) + picoffset + (phaseoffset * (self.rf.freq / (4 * 315 / 88)))

def __init__(self, *args, **kwargs):
Expand Down Expand Up @@ -2691,14 +2693,18 @@ def process(self):

class LDdecode:

def __init__(self, fname_in, fname_out, freader, analog_audio = 0, digital_audio = False, system = 'NTSC', doDOD = True, threads=4, extra_options = {}):
def __init__(self, fname_in, fname_out, freader, _logger, est_frames = None, analog_audio = 0, digital_audio = False, system = 'NTSC', doDOD = True, threads=4, extra_options = {}):
global logger
logger = _logger
self.demodcache = None

self.branch, self.commit = get_git_info()

self.infile = open(fname_in, 'rb')
self.freader = freader

self.est_frames = est_frames

self.numthreads = threads

self.fields_written = 0
Expand Down Expand Up @@ -2870,8 +2876,10 @@ def decodefield(self, initphase = False):

self.rawdecode = self.demodcache.read(self.readloc_block * self.blocksize, self.numblocks * self.blocksize, self.mtf_level)

logger.debug('decoding field')

if self.rawdecode is None:
logging.info("Failed to demodulate data")
logger.info("Failed to demodulate data")
return None, None

self.indata = self.rawdecode['input']
Expand All @@ -2886,7 +2894,7 @@ def decodefield(self, initphase = False):
raise e

if not f.valid:
logging.info("Bad data - jumping one second")
#logger.info("Bad data - jumping one second")
return f, f.nextfieldoffset

return f, f.nextfieldoffset - (self.readloc - self.rawdecode['startloc'])
Expand Down Expand Up @@ -2924,7 +2932,7 @@ def readfield(self, initphase = False):
self.bw_ratios.append(metrics['blackToWhiteRFRatio'])
self.bw_ratios = self.bw_ratios[-keep:]

#logging.info(metrics['blackToWhiteRFRatio'], np.mean(self.bw_ratios))
#logger.info(metrics['blackToWhiteRFRatio'], np.mean(self.bw_ratios))

redo = not self.checkMTF(f, self.prevfield)

Expand Down Expand Up @@ -3136,7 +3144,7 @@ def computeMetrics(self, f, fp = None, verbose = False):

for l in whitelocs:
wl_slice = f.lineslice_tbc(*l)
#logging.info(l, np.mean(f.output_to_ire(f.dspicture[wl_slice])))
#logger.info(l, np.mean(f.output_to_ire(f.dspicture[wl_slice])))
if inrange(np.mean(f.output_to_ire(f.dspicture[wl_slice])), 90, 110):
f.whitesnr_slice = l
metrics['wSNR'] = self.calcpsnr(f, wl_slice)
Expand Down Expand Up @@ -3208,17 +3216,17 @@ def buildmetadata(self, f):
if prevfi is not None:
if not ((fi['fieldPhaseID'] == 1 and prevfi['fieldPhaseID'] == f.rf.SysParams['fieldPhases']) or
(fi['fieldPhaseID'] == prevfi['fieldPhaseID'] + 1)):
logging.warning('Field phaseID sequence mismatch ({0}->{1}) (player may be paused)'.format(prevfi['fieldPhaseID'], fi['fieldPhaseID']))
logger.warning('Field phaseID sequence mismatch ({0}->{1}) (player may be paused)'.format(prevfi['fieldPhaseID'], fi['fieldPhaseID']))
decodeFaults |= 2

if prevfi['isFirstField'] == fi['isFirstField']:
#logging.info('WARNING! isFirstField stuck between fields')
#logger.info('WARNING! isFirstField stuck between fields')
if inrange(fi['diskLoc'] - prevfi['diskLoc'], .95, 1.05):
decodeFaults |= 1
fi['isFirstField'] = not prevfi['isFirstField']
fi['syncConf'] = 10
else:
logging.error('Skipped field')
logger.error('Skipped field')
decodeFaults |= 4
fi['syncConf'] = 0
return fi, True
Expand All @@ -3239,21 +3247,44 @@ def buildmetadata(self, f):

rawloc = np.floor((self.readloc / self.bytes_per_field) / 2)

disk_Type = 'CLV' if self.isCLV else 'CAV'
disk_TimeCode = None
disk_Frame = None
special = None

try:
if self.isCLV and self.earlyCLV: # early CLV
print("file frame %d early-CLV minute %d" % (rawloc, self.clvMinutes), file=sys.stderr)
disk_TimeCode = f'{self.clvMinutes}:xx'
# print("file frame %d early-CLV minute %d" % (rawloc, self.clvMinutes), file=sys.stderr)
elif self.isCLV and self.frameNumber is not None:
print("file frame %d CLV timecode %d:%.2d.%.2d frame %d" % (rawloc, self.clvMinutes, self.clvSeconds, self.clvFrameNum, self.frameNumber), file=sys.stderr)
disk_TimeCode = "CLV Timecode %d:%.2d.%.2d frame %d" % (self.clvMinutes, self.clvSeconds, self.clvFrameNum, self.frameNumber)
elif self.frameNumber:
print("file frame %d CAV frame %d" % (rawloc, self.frameNumber), file=sys.stderr)
#print("file frame %d CAV frame %d" % (rawloc, self.frameNumber), file=sys.stderr)
disk_Frame = f'{self.frameNumber}'
elif self.leadIn:
print("file frame %d lead in" % (rawloc), file=sys.stderr)
special = "Lead In"
elif self.leadOut:
print("file frame %d lead out" % (rawloc), file=sys.stderr)
special = "Lead Out"
else:
special = "Unknown"

if self.est_frames is not None:
outstr = f"Frame {(self.fields_written//2)+1}/{int(self.est_frames)}: File Frame {int(rawloc)}: {disk_Type} "
else:
outstr = f"File Frame {int(rawloc)}: {disk_Type} "
if self.isCLV:
outstr += f"Timecode {disk_TimeCode} "
else:
print("file frame %d unknown" % (rawloc), file=sys.stderr)
outstr += f"Frame #{disk_Frame} "

if special is not None:
outstr += special

print(outstr, file=sys.stderr, end='\r')
logger.debug(outstr)
sys.stderr.flush()

# Prepare JSON fields
if self.frameNumber is not None:
fi['frameNumber'] = int(self.frameNumber)

Expand All @@ -3263,7 +3294,7 @@ def buildmetadata(self, f):
fi['clvSeconds'] = int(self.clvSeconds)
fi['clvFrameNr'] = int(self.clvFrameNum)
except:
logging.warning("file frame %d : VBI decoding error", rawloc)
logger.warning("file frame %d : VBI decoding error", rawloc)

return fi, False

Expand Down Expand Up @@ -3301,11 +3332,11 @@ def seek_getframenr(self, startfield):
fnum = self.decodeFrameNumber(self.prevfield, self.curfield)

if self.earlyCLV:
logging.error("Cannot seek in early CLV disks w/o timecode")
logger.error("Cannot seek in early CLV disks w/o timecode")
return None, startfield
elif fnum is not None:
rawloc = np.floor((self.readloc / self.bytes_per_field) / 2)
logging.info('seeking: file loc %d frame # %d', rawloc, fnum)
logger.info('seeking: file loc %d frame # %d', rawloc, fnum)

# Clear field memory on seeks
self.prevfield = None
Expand All @@ -3317,7 +3348,7 @@ def seek_getframenr(self, startfield):

def seek(self, startframe, target):
""" Attempts to find frame target from file location startframe """
logging.info("Beginning seek")
logger.info("Beginning seek")

if not sys.warnoptions:
import warnings
Expand All @@ -3332,7 +3363,7 @@ def seek(self, startframe, target):

cur = int((self.fieldloc / self.bytes_per_field))
if fnr == target:
logging.info("Finished seek")
logger.info("Finished seek")
print("Finished seeking, starting at frame", fnr, file=sys.stderr)
self.roughseek(cur)
return cur
Expand Down

0 comments on commit ca68709

Please sign in to comment.