Package: apt-spy
Severity: important
Tags: patch
apt-spy has arbitrary length limits, like a URL size of 80 characters,
and a line length of 100 characters. The included patch removes those
limitations (and its application implicitly Closes: #315120).
Justin
Only in apt-spy-3.1.jp5: apt-spy
Only in apt-spy-3.1.jp5: build-stamp
Only in apt-spy-3.1.jp5: configure-stamp
diff -ur apt-spy-3.1/include/parse.h apt-spy-3.1.jp5/include/parse.h
--- apt-spy-3.1/include/parse.h 2005-07-08 20:48:09.000000000 -0400
+++ apt-spy-3.1.jp5/include/parse.h 2005-07-09 13:22:23.000000000 -0400
@@ -3,8 +3,7 @@
#ifndef __PARSE_H
#define __PARSE_H
-#define FTP 0
-#define HTTP 1
+enum protocol { FTP, HTTP };
/* hack */
extern int BESTNUMBER;
@@ -15,9 +14,9 @@
};
struct server_struct {
- char hostname[80];
- char path[2][80];
- char url[2][80];
+ char *hostname;
+ char *path[2];
+ char *url[2];
struct stats_struct stats;
};
diff -ur apt-spy-3.1/include/protocols.h apt-spy-3.1.jp5/include/protocols.h
--- apt-spy-3.1/include/protocols.h 2005-07-08 20:48:09.000000000 -0400
+++ apt-spy-3.1.jp5/include/protocols.h 2005-07-09 13:23:25.000000000 -0400
@@ -3,6 +3,8 @@
#ifndef __PROTOCOLS_H
#define __PROTOCOLS_H
-int get_file(server_t *current, CURL *curl, char *file, int protocol, size_t *total_bytes);
+#include "parse.h" /* enum protocol */
+
+int get_file(server_t *current, CURL *curl, char *file, enum protocol protocol, size_t *total_bytes);
#endif
diff -ur apt-spy-3.1/parse.c apt-spy-3.1.jp5/parse.c
--- apt-spy-3.1/parse.c 2005-07-08 20:48:09.000000000 -0400
+++ apt-spy-3.1.jp5/parse.c 2005-07-09 19:29:09.000000000 -0400
@@ -8,6 +8,7 @@
char *str_toupper(char *str);
#include <stdio.h>
+#include <assert.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
@@ -226,28 +227,32 @@
char *line, *save_line;
char *creation, *save_creation;
int counter = 0;
+ int len;
+
+ /* First, we read in a line from the file */
+ save_line = line = next_entry(mirror_list);
/* Allocate space for creation */
- save_creation = creation = malloc(100);
+ len=5+strlen(line);
+ save_creation = creation = malloc(len);
if (creation == NULL) {
perror("malloc");
exit(1);
}
-
- /* First, we read in a line from the file */
- save_line = line = next_entry(mirror_list);
-
/* test for file error */
if (ferror(mirror_list)) {
perror("fopen");
+ free(save_creation);
return NULL;
}
/* If the line begins with a space, we assume it is empty and the list
is exhausted. */
- if (isspace(*line) != 0)
+ if (isspace(*line) != 0) {
+ free(save_creation);
return NULL;
+ }
/* We now read the server name into "creation" */
while (isspace(*line) == 0)
@@ -290,50 +295,83 @@
void tokenise(server_t *current, char *cur_entry)
{
char *temp; /* We use this for temporary string-pointing :P */
+ static char null_string[]="";
/* We initialise the structure to 0 */
memset(current, 0, sizeof(*current));
/* First, we copy the server name into the struct. */
+ current->hostname=malloc(strlen(cur_entry));
+ if (!current->hostname) {
+ perror("malloc");
+ exit(1);
+ }
+
temp = current->hostname;
while (*cur_entry != ':')
*temp++ = *cur_entry++;
*temp++ = '\0'; /* Turn into string */
+ current->hostname=realloc(current->hostname, 1+strlen(current->hostname));
+ if (!current->hostname) {
+ perror("realloc");
+ exit(1);
+ }
/* We now check for an ftp entry and copy it in */
if (*(++cur_entry) != ':') {
+ current->path[FTP]=malloc(strlen(cur_entry));
+ if (!current->path[FTP]) {
+ perror("malloc");
+ exit(1);
+ }
temp = current->path[FTP];
while (*cur_entry != ':')
*temp++ = *cur_entry++;
*temp++ = '\0';
- }
+ current->path[FTP]=realloc(current->path[FTP], 1+strlen(current->path[FTP]));
+ if (!current->path[FTP]) {
+ perror("malloc");
+ exit(1);
+ }
+ } else current->path[FTP]=null_string;
/* And now check for HTTP entry */
if (*(++cur_entry) != ':') {
+ current->path[HTTP]=malloc(strlen(cur_entry));
+ if (!current->path[HTTP]) {
+ perror("malloc");
+ exit(1);
+ }
temp = current->path[HTTP];
while (*cur_entry != ':')
*temp++ = *cur_entry++;
*temp++ = '\0';
- }
+ current->path[HTTP]=realloc(current->path[HTTP], 1+strlen(current->path[HTTP]));
+ if (!current->path[HTTP]) {
+ perror("realloc");
+ exit(1);
+ }
+ } else current->path[HTTP]=null_string;
/* We're done for now */
}
int write_list(FILE *outfile_p, server_t *best, char *dist)
{
- char url[100];
+ char *url;
/* Make our mark ;) */
fprintf(outfile_p, "# sources.list generated by apt-spy %s\n",
apt_spy_v);
/* Copy URL information */
- if (best[0].stats.protocol == FTP)
- strncpy(url, best[0].url[FTP], 100);
- else
- strncpy(url, best[0].url[HTTP], 100);
+ if (best[0].stats.protocol == FTP) url=best[0].url[FTP];
+ else {
+ assert(best[0].stats.protocol == HTTP);
+ url=best[0].url[HTTP];
+ }
/* And write the line */
fprintf(outfile_p, "deb %s %s main\n", url, dist);
diff -ur apt-spy-3.1/protocol.c apt-spy-3.1.jp5/protocol.c
--- apt-spy-3.1/protocol.c 2005-07-08 20:48:09.000000000 -0400
+++ apt-spy-3.1.jp5/protocol.c 2005-07-09 18:33:06.000000000 -0400
@@ -4,15 +4,18 @@
*/
#include <string.h>
+#include <assert.h>
+#include <stdlib.h>
#include <curl/curl.h>
#include "include/parse.h"
#include "include/protocols.h"
-int get_file(server_t *current, CURL *curl, char *file, int protocol, size_t *total_bytes)
+int get_file(server_t *current, CURL *curl, char *file, enum protocol protocol, size_t *total_bytes)
{
char errorbuff[CURL_ERROR_SIZE + 1];
- char url[250];
+ int urlsize=0;
+ char *url;
double download_speed;
double download_time;
int error;
@@ -23,38 +26,46 @@
/* Give us human readable error messages in errorbuff */
if (curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errorbuff) != 0)
return 1;
-
- /* Create the URL... */
- if (protocol == HTTP)
- strcpy(url, "http://");
- else
- strcpy(url, "ftp://");
- strncat(url, current->hostname, 80);
+ assert(HTTP==protocol || FTP==protocol);
+ urlsize=8+strlen(current->hostname)+strlen(current->path[protocol])+strlen(file);
+ url=malloc(urlsize);
+ if (!url) {
+ perror("malloc");
+ exit(1);
+ }
+
+ if (HTTP==protocol) strcpy(url, "http://");
+ else if (FTP==protocol) strcpy(url, "ftp://");
+ strcat(url, current->hostname);
+
/* Complete the URL. Also save it for later use when writing
sources.list */
if (protocol == HTTP) {
- strncat(url, current->path[HTTP], 80);
- strncpy(current->url[HTTP], url, 80);
- }
- else {
- strncat(url, current->path[FTP], 80);
- strncpy(current->url[FTP], url, 80);
+ strcat(url, current->path[HTTP]);
+ current->url[HTTP]=strdup(url);
+ } else {
+ strcat(url, current->path[FTP]);
+ current->url[FTP]=strdup(url);
}
- strncat(url, file, 80);
+ strcat(url, file);
/* And register it */
- if (curl_easy_setopt(curl, CURLOPT_URL, url) != 0)
+ if (curl_easy_setopt(curl, CURLOPT_URL, url) != 0) {
+ free(url);
return 1;
+ }
/* Now do the actual transfer */
error = curl_easy_perform(curl);
if (error != 0) {
if (error != CURLE_OPERATION_TIMEDOUT) {
+ free(url);
fprintf(stderr, "\t\tError: %s\n", errorbuff);
return 1;
} else if (*total_bytes == 0) {
+ free(url);
fprintf(stderr, "\t\tError: %s\n", errorbuff);
return 1;
}
@@ -65,6 +76,7 @@
if (*total_bytes == 0) {
fprintf(stderr, "\t\tNo data was transfered\n");
+ free(url);
return 1;
}
@@ -79,5 +91,7 @@
current->stats.speed = download_speed;
current->stats.protocol = protocol;
}
+
+ free(url);
return 0;
}