ignoring .DS_Store and adding commented test code
parent b11b3a48ee
commit 17f9563f7e
2 changed files with 33 additions and 3 deletions
.gitignore (vendored, new file): 1 addition

@@ -0,0 +1 @@
.DS_Store

stacker.py: 33 changes
@@ -3,6 +3,11 @@
import os, sys
import Image
import time
import numpy as np
import scipy as sp

from scipy import misc


start = time.time()

@@ -11,6 +16,30 @@ start = time.time()
dir = sys.argv[1]

list = os.listdir(dir)
first = 0
count = 0

# for file in list:
#     try:
#         im = misc.imread(dir + file)
#     except IOError:
#         continue
#
#     count +=1
#     print 'Processing image ' + str(count)
#
#     if first == 0:
#         new = im
#         first = 1
#         continue
#
#     from numpy import *
#
#     for nrow, row in np.nditer([new,im], op_flags=['readwrite']):
#         for npixel, pixel in np.nditer([nrow,row], op_flags=['readwrite']):
#             npixel = maximum(npixel, pixel)
#
# misc.imsave('stack_' + str(int(time.time())) + '.jpg', new)

images = []

@@ -30,7 +59,7 @@ count = len(images)
(w, h) = images[0].size
new = Image.new('RGB', (w, h))
i = 1
pixels = [[[0, 0, 0] for j in range(h)] for j in range(w)]
pixels = np.zeros((w,h,3), dtype=np.uint8)

for im in images:
    print "Image " + str(i) + " of " + str(count)

@@ -51,6 +80,6 @@ for x in range(w):
    for y in range(h):
        new.putpixel((x,y), (pixels[x][y][0], pixels[x][y][1], pixels[x][y][2]))

new.save('/Users/amdavidson/Desktop/stack_' + str(time.time()) + '.jpg', 'JPEG')
new.save('stack_' + str(time.time()) + '.jpg', 'JPEG')

print str(count) + ' images processed in ' + str(int(time.time() - start)) + ' seconds.'