shithub: pokered

ref: b08a0b860ae7b666e3fd526de1f78809368d1bf6
dir: /prequeue.py/

# coding: utf-8
"""
Starting a new Python process to preprocess each source file creates too much
overhead. Instead, a list of files to preprocess is fed to this script, which
handles them all from a single process.
"""

import os
import sys

import preprocessor

def preprocess_queue(filenames=sys.argv[1:]):
    # Remember the real stdin/stdout so they can be restored afterwards.
    stdin  = sys.stdin
    stdout = sys.stdout

    processor = preprocessor.setup_processor()

    for source in filenames:
        dest = os.path.splitext(source)[0] + '.tx'
        # The preprocessor reads from stdin and writes to stdout, so point
        # them at the current source file and its .tx output.
        sys.stdin  = open(source, 'r')
        sys.stdout = open(dest, 'w')
        processor.preprocess()
        # Close (and flush) the redirected streams before the next file.
        sys.stdin.close()
        sys.stdout.close()

    # Restore the original streams.
    sys.stdin  = stdin
    sys.stdout = stdout

def main():
    filenames = list(set(sys.argv[1:]))
    if filenames:
        num_files = len(filenames)
        s = '' if num_files == 1 else 's'
        sys.stdout.write('Preprocessing {0} file{1}...\n'.format(num_files, s))
        preprocess_queue(filenames)

if __name__ == '__main__':
    main()