text: Handle cases that "-" appears more than once #282

Open
wants to merge 7 commits into base: url-from-stdin
src/axel.c (3 changes: 2 additions & 1 deletion)
@@ -119,8 +119,9 @@ axel_new(conf_t *conf, int count, const search_t *res)
	axel->url = u;

	for (i = 0; i < count; i++) {
		strlcpy(u[i].text, res[i].url, sizeof(u[i].text));
		strlcpy(u[i].text, res->url, sizeof(u[i].text));
		u[i].next = &u[i + 1];
		res = res->next;
	}
	u[count - 1].next = u;

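In the hunk above, the first strlcpy() line is the removed one and the second is its replacement: axel_new() now copies res->url and advances res along the new ->next chain instead of indexing res[i], since with this PR the results passed in may be heap-allocated list nodes rather than one contiguous array. A minimal sketch of that traversal pattern (hypothetical helper, not part of the PR; it assumes the search_t type with the ->next field added in src/search.h below):

#include <stdio.h>
#include "axel.h"	/* assumed, as in src/search.c, to make search_t visible */

/* Hypothetical helper: visit `count` nodes of a search_t chain by
 * following ->next, the same way the patched loop in axel_new() does. */
static void
dump_results(const search_t *res, int count)
{
	for (int i = 0; i < count && res; i++, res = res->next)
		printf("%d: %s\n", i, res->url);
}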
src/search.c (52 changes: 52 additions & 0 deletions)
@@ -41,6 +41,8 @@
/* filesearching.com searcher */

#include "config.h"
#include <stdio.h>

#include "axel.h"
#include "sleep.h"

@@ -92,6 +94,52 @@ main(int argc, char *argv[])
}
#endif

/**
 * Scan url list from file
 *
 * @returns number of URLs
 */
int
search_readlist(search_t *results, FILE *fd)
{
	int nresults = 0;
	search_t *cur, *tmp;

	for (cur = results; ; nresults++) {
		tmp = malloc(sizeof(search_t));
		if (!tmp) {
			fprintf(stderr, _("%s\n"), strerror(errno));
			goto free_list;
		}
		do {
			if (fgets(tmp->url, MAX_STRING, fd) == 0) {
				free(tmp);
				return nresults;
			}
			/* Ignore lines starting with "#" */
		} while (tmp->url[0] == '#');
		size_t len = strcspn(tmp->url, "\r\n");
		/* Check the string ends with LF or CRLF */
		if (!tmp->url[len]) {
			fprintf(stderr, _("Error when trying to read URL (Too long?).\n"));
			free(tmp);
			goto free_list;
		}
		tmp->url[len] = '\0';
		cur->next = tmp;
		cur = tmp;
	}

	return nresults;
free_list:
	for (cur = results->next; cur;) {
		tmp = cur;
		cur = cur->next;
		free(tmp);
	}
	return 0;
}
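A minimal usage sketch for search_readlist() above (not part of the PR): the caller passes an existing head node and the function appends one malloc'd node per line read, skipping lines that start with '#'. Note that, as written above, the last appended node's next field is left unset, so the walk below is bounded by the returned count rather than by a NULL terminator. The head node here is a hypothetical zeroed stand-in for the argv-built list that text.c actually uses, and the snippet is meant to sit inside a function that already includes axel's headers.

	search_t head = { 0 };	/* hypothetical head node; its url stays empty */
	int n = search_readlist(&head, stdin);

	const search_t *p = head.next;
	for (int i = 0; i < n; i++) {
		printf("queued: %s\n", p->url);
		if (i + 1 < n)
			p = p->next;	/* count-bounded walk; see note above */
	}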

int
search_makelist(search_t *results, char *orig_url)
{
@@ -297,7 +345,11 @@ search_speedtest(void *r)
void
search_sortlist(search_t *results, int count)
{
	// FIXME sort linked list nodes
	qsort(results, count, sizeof(search_t), search_sortlist_qsort);
	for (int i = 1; i < count; i++) {
		results[i - 1].next = &results[i];
	}
}

static
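On search_sortlist() above: qsort() permutes whole search_t structs, so after the sort every element's next field still holds its pre-sort value; the added loop re-chains the nodes in the new array order (the FIXME records that sorting the linked nodes themselves is still open). For orientation only, a comparator of the shape qsort() expects might look like the sketch below; the real search_sortlist_qsort() in src/search.c may order on different criteria.

/* Hypothetical comparator, illustration only: fastest result first. */
static int
speed_cmp(const void *a, const void *b)
{
	const search_t *x = a, *y = b;
	return (y->speed > x->speed) - (y->speed < x->speed);
}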
src/search.h (2 changes: 2 additions & 0 deletions)
@@ -45,8 +45,10 @@ typedef struct {
	int speed, size;
	pthread_t speed_thread[1];
	conf_t *conf;
	void *next;
} search_t;

int search_readlist(search_t *results, FILE *fd);
int search_makelist(search_t *results, char *url);
int search_getspeeds(search_t *results, int count);
void search_sortlist(search_t *results, int count);
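A small design note on the next field added above: it is declared void *, presumably because the typedef'd struct has no tag to refer to inside its own definition, so assignments through it (as in text.c) get no type checking from the compiler. A tagged alternative, sketched below with the other fields elided, would keep the links typed; this is an observation only, not something the PR does.

/* Sketch only (not this PR): tag the struct so `next` can carry its own type. */
typedef struct search {
	/* ... url, speed, size, speed_thread, conf, ... as above ... */
	struct search *next;
} search_t;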
src/text.c (111 changes: 66 additions & 45 deletions)
@@ -96,9 +96,11 @@ main(int argc, char *argv[])
{
char fn[MAX_STRING];
int do_search = 0;
search_t *search;
search_t *search, *list, *tail;
search_t *tmp, *cur;
conf_t conf[1];
axel_t *axel;
int prealloc_num, url_num = 0;
int j, ret = 1;
char *s;

@@ -238,87 +240,106 @@ main(int argc, char *argv[])
ssl_init(conf);
#endif /* HAVE_SSL */

if (argc - optind == 0) {
prealloc_num = url_num = argc - optind;
if (url_num == 0) {
print_help();
goto free_conf;
} else if (strcmp(argv[optind], "-") == 0) {
s = malloc(MAX_STRING);
if (!s)
goto free_conf;
}

if (scanf("%1024[^\n]s", s) != 1) {
fprintf(stderr,
_("Error when trying to read URL (Too long?).\n"));
free(s);
goto free_conf;
}
} else {
s = argv[optind];
if (strlen(s) > MAX_STRING) {
fprintf(stderr,
_("Can't handle URLs of length over %zu\n"),
MAX_STRING);
goto free_conf;
list = calloc(prealloc_num, sizeof(search_t));
if (!list) {
fprintf(stderr, _("%s\n"), strerror(errno));
goto free_conf;
}

/* Load URLs from argv and stdin */
tmp = cur = list;
tail = list + prealloc_num - 1;
for (argv += optind; *argv; argv++) {
if (argv[0][0] == '-' && !argv[0][1]) {
/* Add URL list from stdin to end of list, "-"s will be ignored */
if (list + prealloc_num - 1 == tail) {
/* add nodes to end of the list */
url_num += search_readlist(list + prealloc_num - 1, stdin);
}
if (list == tail) { /* Only "-"s in argument list */
list = list->next;
free(tail);
} else {
/* Move forward, because "-" is ignored */
(tail - 1)->next = tail->next;
}
tail--;
url_num--;
} else {
/* Add URL from argv */
strlcpy(tmp->url, *argv, sizeof(tmp->url));
cur->next = tmp++;
cur = cur->next;
}
}
/* Fix next pointer of first node */
if (tail == list && url_num > 1) {
list->next = (list + 1)->next;
}

printf(_("Initializing download: %s\n"), s);
printf(_("Initializing download: %s\n"), list[0].url);
if (do_search) {
search = calloc(conf->search_amount + 1, sizeof(search_t));
if (!search)
goto free_conf;
if (!search) {
return 1;
}

search[0].conf = conf;
if (conf->verbose)
printf(_("Doing search...\n"));
int i = search_makelist(search, s);
// FIXME duplicated URL list[0].url
int i = search_makelist(search, list[0].url);
if (i < 0) {
fprintf(stderr, _("File not found\n"));
goto free_conf;
return 1;
}
if (conf->verbose)
printf(_("Testing speeds, this can take a while...\n"));

// FIXME only URLs in 'search' are tested and sorted
j = search_getspeeds(search, i);
if (j < 0) {
fprintf(stderr, _("Speed testing failed\n"));
// FIXME may leak memory
return 1;
}

search_sortlist(search, i);
j = min(j, conf->search_top);
/* add search results into URL list */
search[j - 1].next = list;
url_num += j;
if (conf->verbose) {
printf(_("%i usable servers found, will use these URLs:\n"),
j);
j = min(j, conf->search_top);
printf("%-60s %15s\n", "URL", _("Speed"));
for (i = 0; i < j; i++)
printf("%-70.70s %5i\n", search[i].url,
search[i].speed);
printf("\n");
}
axel = axel_new(conf, j, search);
free(search);
if (!axel || axel->ready == -1) {
print_messages(axel);
goto close_axel;
}
} else {
search = calloc(argc - optind, sizeof(search_t));
if (!search)
goto free_conf;
search = list;
}

for (int i = 0; i < argc - optind; i++)
strlcpy(search[i].url, argv[optind + i],
sizeof(search[i].url));
axel = axel_new(conf, argc - optind, search);
free(search);
if (!axel || axel->ready == -1) {
print_messages(axel);
goto close_axel;
}
#ifdef DEBUG
tmp = search;
for (int i = 0; i < url_num; i++) {
printf("URL loaded: %s\n", tmp->url);
tmp = tmp->next;
}
#endif

axel = axel_new(conf, url_num, search);
print_messages(axel);
if (s != argv[optind]) {
free(s);
if (!axel || axel->ready == -1) {
goto close_axel;
}

/* Check if a file name has been specified */
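Taken together, the rewritten block above preallocates one search_t slot per command-line argument, fills the slots for ordinary URL arguments, and on the first "-" reads a URL list from stdin via search_readlist() and splices those nodes onto the tail of the list; any further "-" arguments are ignored, which is the case the PR title refers to. A hedged usage example, with placeholder URLs and assuming the branch builds as axel:

	printf 'http://example.com/a\n# skipped\nhttp://example.com/b\n' | axel http://example.com/c - http://example.com/d

This should queue c and d from the command line followed by a and b from stdin, with the '#' line ignored and a second "-" having no effect.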