What's wrong with feeding it stack memory?
On Wed, Dec 7, 2016 at 12:33 PM, Marek Olšák <mar...@gmail.com> wrote:
> From: Marek Olšák <marek.ol...@amd.com>
>
> ---
>  run.c | 6 +++++-
>  1 file changed, 5 insertions(+), 1 deletion(-)
>
> diff --git a/run.c b/run.c
> index 08fd543..ded224a 100644
> --- a/run.c
> +++ b/run.c
> @@ -656,28 +656,32 @@ main(int argc, char **argv)
>
>  	/* If there's only one GLSL shader, mark it separable so
>  	 * inputs and outputs aren't eliminated.
>  	 */
>  	if (num_shaders == 1 && type != TYPE_VP && type != TYPE_FP)
>  		use_separate_shader_objects = true;
>
>  	if (use_separate_shader_objects) {
>  		for (unsigned i = 0; i < num_shaders; i++) {
>  			const char *const_text;
> -			char *text = alloca(shader[i].length + 1);
> +			unsigned size = shader[i].length + 1000;
> +			/* Using alloca crashes in the GLSL compiler. */
> +			char *text = malloc(size);
> +			memset(text, 0, size);
>
>  			/* Make it zero-terminated. */
>  			memcpy(text, shader[i].text, shader[i].length);
>  			text[shader[i].length] = 0;
>
>  			const_text = text;
>  			glCreateShaderProgramv(shader[i].type, 1, &const_text);
> +			free(text);
>  		}
>  	} else if (type == TYPE_CORE || type == TYPE_COMPAT) {
>  		GLuint prog = glCreateProgram();
>
>  		for (unsigned i = 0; i < num_shaders; i++) {
>  			GLuint s = glCreateShader(shader[i].type);
>  			glShaderSource(s, 1, &shader[i].text, &shader[i].length);
>  			glCompileShader(s);
>
>  			GLint param;
> --
> 2.7.4
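For reference, the only thing glCreateShaderProgramv actually requires here is a NUL-terminated copy of the source, since it takes no length parameter and the shader text is a (pointer, length) slice of the parsed test file with no terminator of its own. A minimal sketch of just that copy, with an exact-size allocation and no memset; the helper name and the epoxy include are mine for illustration, not from the patch:

    #include <stdlib.h>
    #include <string.h>
    #include <epoxy/gl.h>   /* GL entry points; assuming an epoxy-style loader */

    /* Build a separable program from one shader whose source is a
     * non-terminated (pointer, length) slice. Illustrative helper,
     * not code from the patch.
     */
    static GLuint
    compile_separable(GLenum stage, const char *src, size_t len)
    {
            const char *const_text;
            GLuint prog;
            char *text = malloc(len + 1);

            if (!text)
                    return 0;

            /* glCreateShaderProgramv has no length parameter, so the
             * string must be NUL-terminated: copy and terminate.
             */
            memcpy(text, src, len);
            text[len] = '\0';

            const_text = text;
            prog = glCreateShaderProgramv(stage, 1, &const_text);
            free(text);
            return prog;
    }

Whether that buffer lives on the stack or the heap is invisible to the API, hence the question above; the extra 1000 zeroed bytes in the patch are a workaround for the observed crash, not something glCreateShaderProgramv requires.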