diff --git a/layergen.py b/layergen.py
index 9a34bee..fc2db4e 100644
--- a/layergen.py
+++ b/layergen.py
@@ -159,8 +159,7 @@ class mstr_layergen:
         for line in file:
             linedata = line.split(" ")
             if linedata[0] == self._tag and linedata[1] == self._value:
-                src = linedata[2].split(",")
-                contrast = src[4]
+                contrast = int(linedata[4])
         return contrast
 
 
@@ -295,6 +294,8 @@ class mstr_layergen:
 
         # Find this layer's predetermined contrast
         lyr_contrast = self.findLayerContrast()
+        if lyr_contrast != 0:
+            mstr_msg("layergen", "Applying contrast value: " + str(lyr_contrast))
 
         # Should this not exist yet, we need to create it
         #if os.path.isfile(gensrc_ptc) == False:
@@ -376,18 +377,18 @@ class mstr_layergen:
                 self._tag == "landuse" and self._value == "residential"):
             amt = randrange(2, 9)
             masks = glob.glob(mstr_datafolder + "textures/tile/completion/*.png")
+            patchtags = [
+                ["landuse", "meadow"],
+                ["landuse", "grass"],
+                ["natural", "heath"],
+                ["natural", "scrub"]
+            ]
             for i in range(1, amt + 1):
                 pick = randrange(0, len(masks))
                 patchmask = Image.open(masks[pick])
+                patchmask = patchmask.rotate(randrange(0, 360), expand=True)
                 patchpix = patchmask.load()
 
                 # Pick from possible tags and values for the patches
-                patchtags = [
-                    ["landuse", "meadow"],
-                    ["landuse", "grass"],
-                    ["natural", "heath"],
-                    ["natural", "scrub"]
-                ]
-
                 numbers = list(range(1, 16))
                 src = random.sample(numbers, 5)
@@ -411,7 +412,7 @@ class mstr_layergen:
                             nc = ( oc[0], oc[1], oc[2], ptc_msk[3] )
                             lp_pix[x,y] = nc
 
-                layerpatch = layerpatch.rotate(randrange(0, 360), expand=True)
+                #layerpatch = layerpatch.rotate(randrange(0, 360), expand=True)
 
                 lx = randrange(self._imgsize - layerpatch.width)
                 ly = randrange(self._imgsize - layerpatch.height)
@@ -546,17 +547,17 @@ class mstr_layergen:
                     d = randrange(41, 61)
                     layer_comp_pix[x, y] = ( d,d,d,a[3] )
                 if self._tag == "highway" and self._value != "motorway":
-                    d = randrange(0, 36)
-                    dr = 90+d
-                    dg = 90+d
-                    db = 95+d
+                    d = randrange(0, 6)
+                    dr = 80+d
+                    dg = 80+d
+                    db = 85+d
                     da = a[3]
                     layer_comp_pix[x, y] = ( dr,dg,db,da )
                 if self._tag == "highway" and self._value == "motorway":
-                    d = randrange(0, 36)
-                    dr = 57+d
-                    dg = 68+d
-                    db = 70+d
+                    d = randrange(0, 46)
+                    dr = 47+d
+                    dg = 58+d
+                    db = 60+d
                     layer_comp_pix[x, y] = ( dr,dg,db,a[3] )
                 if self._tag == "highway" and (self._value == "footway" or self._value == "track" or self._value == "path"):
                     dr = randrange(158, 183)
@@ -690,7 +691,7 @@ class mstr_layergen:
         ptc_src = []
 
         # Find this layer's predetermined contrast
-        lyr_contrast = self.findLayerContrast()
+        lyr_contrast = randrange(1, 4)
 
         rg = mstr_resourcegen(self._tag, self._value, src)
         rg.setLayerContrast(int(lyr_contrast))
diff --git a/photogen.py b/photogen.py
index 2a8cb5e..ce2decf 100644
--- a/photogen.py
+++ b/photogen.py
@@ -1,6 +1,7 @@
 import os
 
 from PIL import Image, ImageFilter, ImageEnhance, ImageFile
+
 from defines import *
 from layergen import *
 from log import *
@@ -92,40 +93,42 @@ class mstr_photogen:
                 if c < len(cpl)-1:
                     cplstr = cplstr + "_"
 
-            # Should this not exist yet, we need to create it
-            rg = mstr_resourcegen("landuse", "meadow", cpl)
-            rg.setLayerContrast(randrange(1,4))
-            ptcimg = rg.gensource()
+            # Find the right color catalogue
+            cpl_catalog = 0
+            for c in range(len(mstr_completion_colors)):
+                if mstr_completion_colors[c][0] == edn:
+                    cpl_catalog = c
 
-            ptc_src = [ptcimg[0]]
-            samples = 250 # <- We need this in a moment
-            for i in range(samples):
-                imgid = 0
-                if len(ptc_src) == 1: imgid = 0
-                l = 0 - int(ptc_src[imgid].width / 2)
-                r = cmpl.width - int(ptc_src[imgid].width / 2)
-                t = 0 - int(ptc_src[imgid].height / 2)
-                b = cmpl.height - int(ptc_src[imgid].height / 2)
-                cmpl.alpha_composite(ptc_src[imgid], (randrange(l, r), randrange(t, b)))
-
-            brd_img = ptcimg[1]
-            cmpl.alpha_composite(brd_img)
+            # Put in some pixels
+            cmpl_bg = Image.new("RGBA", (self._tile.width, self._tile.height))
+            cmpl_pix = cmpl_bg.load()
+            for y in range(0, self._tile.height):
+                for x in range(0, self._tile.width):
+                    idx = randrange(0, len(mstr_completion_colors[cpl_catalog][1]))
+                    clr = mstr_completion_colors[cpl_catalog][1][idx]
+                    cmpl_pix[x,y] = clr
+            cmpl_bg = ImageEnhance.Contrast(cmpl_bg).enhance(0.8)
+            cmpl_bg = cmpl_bg.filter(ImageFilter.GaussianBlur(radius=1))
+            cmpl_bg.alpha_composite(self._tile)
+            self._tile = cmpl_bg
 
             # Patches to add from other sources. If they don't exist, we also need to make them
             masks = glob.glob(mstr_datafolder + "textures/tile/completion/*.png")
             amt = randrange(5, 16)
+            patchtags = [
+                ["landuse", "meadow"],
+                ["landuse", "grass"],
+                ["natural", "heath"],
+                ["natural", "scrub"]
+            ]
+
             for i in range(1, amt + 1):
                 pick = randrange(0, len(masks))
                 patchmask = Image.open(masks[pick])
+                patchmask = patchmask.rotate(randrange(0, 360), expand=True)
                 patchpix = patchmask.load()
 
-                # Pick from possible tags and values for the patches
-                patchtags = [
-                    ["landuse", "meadow"],
-                    ["landuse", "grass"],
-                    ["natural", "heath"],
-                    ["natural", "scrub"]
-                ]
+                # Pick from possible tags and values for the patches
                 numbers = list(range(1, 16))
                 src = random.sample(numbers, 5)
 
@@ -149,10 +152,10 @@ class mstr_photogen:
                             nc = (oc[0], oc[1], oc[2], ptc_msk[3])
                             lp_pix[x, y] = nc
 
-            layerpatch = layerpatch.rotate(randrange(0, 360), expand=True)
+            #layerpatch = layerpatch.rotate(randrange(0, 360), expand=True)
 
-            lx = randrange(self._imgsize - layerpatch.width)
-            ly = randrange(self._imgsize - layerpatch.height)
+            lx = randrange(0, self._imgsize - layerpatch.width)
+            ly = randrange(0, self._imgsize - layerpatch.height)
             cmpl.alpha_composite(layerpatch, (lx, ly))
 
             # Merge the images