To read the structure of a literate programming web from a path in the file system.


§1. Web objects. Each web loaded in produces a single instance of the following. If W is an ls_web, note that W->chapters is the full list of all chapters in its program, including those imported from other webs: this may be different from W->main_module->chapters, which contains just its own chapters.

In fact, the W->chapters list is arguably redundant, since it's just a concatenation of the chapter lists of the modules, but it's much more convenient to store this redundant copy than to have to keep traversing the module tree.

typedef struct ls_web {
    struct wcl_declaration *declaration; /* the declaration giving rise to this web, or |NULL| */
    struct ls_module *main_module; /* the root of a small dependency graph */
    struct linked_list *chapters; /* of |ls_chapter|: all chapters, imported ones included */

    struct pathname *path_to_web; /* relative to the current working directory */
    struct filename *single_file; /* relative to the current working directory */
    int is_page; /* is this a simple one-section web with no contents page? */
    struct linked_list *bibliographic_data; /* of |web_bibliographic_datum| */
    struct linked_list *conventions; /* of |ls_conventions| */
    struct semantic_version_number version_number; /* as deduced from bibliographic data */
    struct ls_notation *web_notation; /* which version syntax the sections will have */
    int chaptered; /* has the author explicitly divided it into named chapters? */
    struct ls_index *index; /* made by |WebIndexing::new_index| at creation time */

    struct programming_language *web_language; /* in which most of the sections are written */
    struct linked_list *tangle_target_names; /* of |text_stream| */
    struct linked_list *tangle_targets; /* of |tangle_target| */
    struct ls_chunk *definitions_chunk; /* the first |DEFINITIONS_HERE_LSCT| chunk found, or |NULL| */

    struct filename *contents_filename; /* or |NULL| for a single-file web */
    struct linked_list *header_filenames; /* of |filename| */

    struct ls_holon_namespace *global_holon_namespace; /* web-wide namespace for holons */

    void *weaving_ref; /* scratch reference owned by the weaver */
    void *tangling_ref; /* scratch reference owned by the tangler */
    void *analysis_ref; /* scratch reference owned by code analysis */
    CLASS_DEFINITION
} ls_web;

ls_web *WebStructure::new_ls_web(wcl_declaration *D) {
    /* Create a new web arising from the declaration |D|, which is allowed to
       be |NULL| (the rest of this function guards on |D| throughout), and
       deduce where in the file system the web lives, if anywhere. */
    ls_web *W = CREATE(ls_web);
    W->declaration = D;
    if (D) D->object_declared = STORE_POINTER_ls_web(W);
    W->bibliographic_data = NEW_LINKED_LIST(web_bibliographic_datum);
    Bibliographic::initialise_data(W);
    W->conventions = NEW_LINKED_LIST(ls_conventions);
    ADD_TO_LINKED_LIST(Conventions::generic(), ls_conventions, W->conventions);
    W->is_page = FALSE;
    if ((D) && (D->modifier == PAGE_WCLMODIFIER)) W->is_page = TRUE;
    if ((D) && (D->scope)) {
        W->path_to_web = D->scope->associated_path;
        if (W->path_to_web == NULL)
            W->path_to_web = Filenames::up(D->scope->associated_file);
        W->single_file = NULL;
        W->contents_filename = NULL;
    } else if ((D) && (D->associated_path)) { /* bug fix: |D| was dereferenced without a check */
        W->path_to_web = D->associated_path;
        W->single_file = NULL;
        W->contents_filename = D->associated_file;
    } else if (D) { /* bug fix: likewise guarded now */
        W->path_to_web = Filenames::up(D->associated_file);
        TEMPORARY_TEXT(ext)
        Filenames::write_extension(ext, D->associated_file);
        if (Str::eq_insensitive(ext, I".inwebc")) {
            /* a contents-page file, even though it is a single file */
            W->single_file = NULL;
            W->contents_filename = D->associated_file;
        } else {
            W->single_file = D->associated_file;
            W->contents_filename = NULL;
        }
        DISCARD_TEXT(ext)
    } else {
        /* no declaration at all: the web has no location in the file system */
        W->path_to_web = NULL;
        W->single_file = NULL;
        W->contents_filename = NULL;
    }
    W->version_number = VersionNumbers::null();
    W->web_notation = NULL;
    W->chaptered = FALSE;
    W->index = WebIndexing::new_index();
    W->chapters = NEW_LINKED_LIST(ls_chapter);
    W->tangle_target_names = NEW_LINKED_LIST(text_stream);
    W->tangle_targets = NEW_LINKED_LIST(tangle_target);
    W->definitions_chunk = NULL;
    W->web_language = NULL;
    W->header_filenames = NEW_LINKED_LIST(filename);
    W->main_module = WebModules::create_main_module(W);
    W->global_holon_namespace = Holons::new_namespace(W, NULL);
    W->weaving_ref = NULL;
    W->tangling_ref = NULL;
    W->analysis_ref = NULL;
    return W;
}

ls_web *WebStructure::from_declaration(wcl_declaration *D) {
    /* The web which the declaration |D| gave rise to, or |NULL| if |D| is |NULL|. */
    if (D) return RETRIEVE_POINTER_ls_web(D->object_declared);
    return NULL;
}

§2. Web reading.

ls_web *WebStructure::read_fully(ls_colony *C, wcl_declaration *D,
    int enumerating, int weaving, int verbosely) {
    /* Fully read the web declared by |D|: establish its conventions (in the
       context of colony |C|), read its literate source, analyse it, and — only
       when weaving — parse its commentary as Markdown. */
    ls_web *W = WebStructure::from_declaration(D);
    Conventions::establish(W, C);
    WebNotation::adapt_to_conventions(W->web_notation, W->conventions);
    WebStructure::read_web_source(W, verbosely, weaving);
    Write the Inweb Version bibliographic datum2.1;
    CodeAnalysis::initialise_analysis_details(W);
    WeavingDetails::initialise(W);
    CodeAnalysis::analyse_web(W, enumerating, weaving);
    if (weaving) WebStructure::parse_markdown(W); /* tangling has no need of Markdown */
    return W;
}

§2.1. Write the Inweb Version bibliographic datum2.1 =

    /* Record the "Inweb Version" bibliographic datum; authors may not declare
       it themselves (|declaration_permitted| is cleared). NOTE(review): the
       value text looks like a tangler placeholder, presumably substituted with
       the real version number at tangle time — confirm. */
    TEMPORARY_TEXT(IB)
    WRITE_TO(IB, "output from tangler command 'Version Number'");
    web_bibliographic_datum *bd = Bibliographic::preset_datum(W, I"Inweb Version", IB);
    bd->declaration_permitted = FALSE;
    DISCARD_TEXT(IB)

§3. In some webs, the content of the commentary chunks is written in Markdown format. We're only going to parse this if we need to: for tangling, for example, we don't need to, and nor if the syntax doesn't use Markdown anyway.

void WebStructure::parse_markdown(ls_web *W) {
    /* Parse the commentary of every section in |W| with the Markdown variation
       appropriate to the web's notation, then issue any errors recorded. */
    markdown_variation *mv = WebNotation::commentary_variation(W);
    ls_chapter *Ch;
    ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            LiterateSource::parse_markdown(Sn->literate_source, mv);
    WebErrors::issue_all_recorded(W);
}

§4. Statistics:

int WebStructure::chapter_count(ls_web *W) {
    /* The total number of chapters in |W|, imported ones included. */
    int total = 0;
    ls_chapter *Ch;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        total += 1;
    return total;
}
int WebStructure::imported_chapter_count(ls_web *W) {
    /* The number of chapters in |W| which originated in a different web. */
    int total = 0;
    ls_chapter *Ch;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        total += (Ch->imported)?1:0;
    return total;
}
int WebStructure::section_count(ls_web *W) {
    /* The total number of sections in |W|, across all chapters. */
    int total = 0;
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            total += 1;
    return total;
}
int WebStructure::imported_section_count(ls_web *W) {
    /* The number of sections belonging to imported chapters of |W|. */
    int total = 0;
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters) {
        if (Ch->imported == FALSE) continue;
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            total += 1;
    }
    return total;
}
int WebStructure::paragraph_count(ls_web *W) {
    /* The total number of paragraphs across all sections of |W| whose
       literate source has been read. */
    int total = 0;
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections) {
            if (Sn->literate_source == NULL) continue;
            for (ls_paragraph *P = Sn->literate_source->first_par; P; P = P->next_par)
                total++;
        }
    return total;
}
int WebStructure::imported_paragraph_count(ls_web *W) {
    /* As |WebStructure::paragraph_count|, but counting only imported chapters. */
    int total = 0;
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters) {
        if (Ch->imported == FALSE) continue;
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections) {
            if (Sn->literate_source == NULL) continue;
            for (ls_paragraph *P = Sn->literate_source->first_par; P; P = P->next_par)
                total++;
        }
    }
    return total;
}
int WebStructure::line_count(ls_web *W) {
    /* The total number of source lines read across all sections of |W|. */
    int total = 0;
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            if (Sn->literate_source)
                total += Sn->literate_source->lines_read;
    return total;
}
int WebStructure::imported_line_count(ls_web *W) {
    /* As |WebStructure::line_count|, but counting only imported chapters. */
    int total = 0;
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters) {
        if (Ch->imported == FALSE) continue;
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            if (Sn->literate_source)
                total += Sn->literate_source->lines_read;
    }
    return total;
}

int WebStructure::has_only_one_section(ls_web *W) {
    /* |TRUE| if and only if |W| contains exactly one section. */
    return (WebStructure::section_count(W) == 1)?TRUE:FALSE;
}

int WebStructure::has_errors(ls_web *W) {
    /* |TRUE| if any section of |W| has errors in its literate source. */
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections) {
            int bad = LiterateSource::unit_has_errors(Sn->literate_source);
            if (bad) return TRUE;
        }
    return FALSE;
}

§5. This really serves no purpose, but seems to boost morale.

void WebStructure::print_statistics(ls_web *W) {
    /* Print a one-line summary of the size of |W|: modules, chapters,
       sections, paragraphs and lines, omitting counts which would not be
       informative for this kind of web. */
    int s = 0, c = 0, n = 0, lc = 0;
    ls_chapter *C;
    LOOP_OVER_LINKED_LIST(C, ls_chapter, W->chapters) {
        c++;
        ls_section *S;
        LOOP_OVER_LINKED_LIST(S, ls_section, C->sections) {
            s++;
            /* bug fix: guard against an unread section, exactly as
               |WebStructure::paragraph_count| and |WebStructure::line_count| do */
            if (S->literate_source) {
                for (ls_paragraph *par = S->literate_source->first_par; par; par = par->next_par)
                    n++;
                lc += S->literate_source->lines_read;
            }
        }
    }
    WebStructure::print_web_identity(W);
    PRINT(": ");
    int nm = WebModules::no_dependencies(W->main_module);
    if (nm > 1)
        PRINT("%d module%s : ", nm, (nm == 1)?"":"s");
    if (W->chaptered)
        PRINT("%d chapter%s : ", c, (c == 1)?"":"s");
    if (W->is_page == FALSE)
        PRINT("%d section%s : ", s, (s == 1)?"":"s");
    PRINT("%d paragraph%s : %d line%s\n",
        n, (n == 1)?"":"s",
        lc, (lc == 1)?"":"s");
}

void WebStructure::print_web_identity(ls_web *W) {
    /* Print the web's title, followed optionally by a parenthesised note of
       its programming language and/or its notation. */
    PRINT("web \"%S\"", Bibliographic::get_datum(W, I"Title"));

    int opened = FALSE;
    programming_language *pl = WebStructure::web_language(W);
    if ((pl) && (Str::ne_insensitive(pl->language_name, I"None"))) {
        PRINT(" (%S program", pl->language_name);
        opened = TRUE;
    }
    if (W->web_notation) {
        if (opened == FALSE) PRINT(" ("); else PRINT(" in ");
        PRINT("%S notation", W->web_notation->name);
        opened = TRUE;
    }
    if (opened) PRINT(")");
}

§6. This is really for debugging:

void WebStructure::write_literate_source(OUTPUT_STREAM, ls_web *W) {
    /* Write out the literate source of every section of |W|, for debugging. */
    ls_chapter *Ch;
    ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            LiterateSource::write_lsu(OUT, Sn->literate_source);
}

§7. Chapter objects. The chapters list in an ls_web contains these as its entries. Instances of ls_chapter are never created for any other purpose, so they can exist only as part of an ls_web; and once added they are never removed.

typedef struct ls_chapter {
    struct ls_web *owning_web; /* the web of which this chapter is part */
    struct ls_module *owning_module; /* the module which contributed it, or |NULL| */
    int imported; /* did this originate in a different web? */
    struct linked_list *sections; /* of |ls_section| */

    struct text_stream *ch_range; /* e.g., |P| for Preliminaries, |7| for Chapter 7, |C| for Appendix C */
    struct text_stream *ch_title; /* e.g., "Chapter 3: Fresh Water Fish" */
    struct text_stream *ch_basic_title; /* e.g., "Chapter 3" */
    struct text_stream *ch_decorated_title; /* e.g., "Fresh Water Fish" */
    struct text_stream *rubric; /* optional; without double-quotation marks */

    struct text_stream *ch_language_name; /* in which most of the sections are written */
    struct programming_language *ch_language; /* in which this chapter is written */

    void *weaving_ref; /* scratch reference owned by the weaver */
    void *tangling_ref; /* scratch reference owned by the tangler */
    void *analysis_ref; /* scratch reference owned by code analysis */
    CLASS_DEFINITION
} ls_chapter;

ls_chapter *WebStructure::new_ls_chapter(ls_web *W, text_stream *range, text_stream *titling) {
    /* Create a chapter with the given range and title, splitting a title of
       the form "Basic Title: Decorated Title" into its two halves, and add it
       to the chapter list of |W|. */
    if (W == NULL) internal_error("no web for chapter");
    ls_chapter *Ch = CREATE(ls_chapter);
    Ch->owning_web = W;
    Ch->owning_module = NULL;
    Ch->ch_range = Str::duplicate(range);
    Ch->ch_title = Str::duplicate(titling);
    match_results mr = Regexp::create_mr();
    int divided = Regexp::match(&mr, Ch->ch_title, U"(%c*?): *(%c*)");
    if (divided) {
        Ch->ch_basic_title = Str::duplicate(mr.exp[0]);
        Ch->ch_decorated_title = Str::duplicate(mr.exp[1]);
    } else {
        Ch->ch_basic_title = Str::duplicate(Ch->ch_title);
        Ch->ch_decorated_title = Str::new();
    }
    Regexp::dispose_of(&mr);
    Ch->rubric = Str::new();
    Ch->ch_language_name = NULL;
    Ch->ch_language = NULL;
    Ch->imported = FALSE;
    Ch->sections = NEW_LINKED_LIST(ls_section);
    Ch->weaving_ref = NULL;
    Ch->tangling_ref = NULL;
    Ch->analysis_ref = NULL;

    ADD_TO_LINKED_LIST(Ch, ls_chapter, W->chapters);
    return Ch;
}

§8. Section objects. The sections list in an ls_chapter contains these as its entries. Instances of ls_section are never created for any other purpose, so they can exist only as part of an ls_chapter; and once added they are never removed.

typedef struct ls_section {
    struct ls_chapter *owning_chapter; /* the chapter of which this section is part */

    struct text_stream *sect_title; /* e.g., "Program Control" */
    struct text_stream *sect_claimed_location; /* e.g., "../somewhere/else.w" */
    struct text_stream *sect_range; /* e.g., "2/ct" */

    struct text_stream *titling_line_to_insert; /* or |NULL| if none need be inserted */
    struct ls_unit *literate_source; /* the parsed source; |NULL| until the section is read */

    struct filename *source_file_for_section; /* content either from a file... */
    struct wcl_declaration *source_declaration_for_section; /* ...or the body of a declaration */
    int skip_from; /* ignore lines numbered in this inclusive range */
    int skip_to;
    int sect_extent; /* total number of lines read from a file (including skipped ones) */

    struct text_stream *tag_name; /* tag given after |^| in the section heading, if any */

    struct programming_language *sect_language; /* in which this section is written */
    struct text_stream *sect_language_name; /* name of that language, if explicitly given */
    int is_independent_target; /* does this section tangle to a target of its own? */
    struct tangle_target *sect_target; /* |NULL| unless this section produces a tangle of its own */

    int scratch_flag; /* temporary workspace */

    void *weaving_ref; /* scratch reference owned by the weaver */
    void *tangling_ref; /* scratch reference owned by the tangler */
    void *analysis_ref; /* scratch reference owned by code analysis */
    CLASS_DEFINITION
} ls_section;

ls_section *WebStructure::new_ls_section(ls_chapter *C, text_stream *titling, text_stream *at) {
    /* Create a section inside chapter |C|. The |titling| text takes one of
       four forms — |"Name" at "loc" ^"tag"|, |"Name" at "loc"|, |name ^"tag"|,
       or plain |name| — and |at| is an optional path prefix applied to the
       name or claimed location. */
    if (C == NULL) internal_error("no chapter for section");
    ls_section *S = CREATE(ls_section);
    S->owning_chapter = C;
    S->source_file_for_section = NULL;
    S->source_declaration_for_section = NULL;
    S->skip_from = 0;
    S->skip_to = 0;
    S->titling_line_to_insert = NULL;
    S->sect_range = Str::new();
    S->literate_source = NULL;
    S->sect_language_name = NULL;
    S->sect_language = NULL;
    S->is_independent_target = FALSE;
    S->sect_target = NULL;
    S->tag_name = NULL; /* bug fix: previously left uninitialised on the second branch below */

    S->sect_title = NULL;
    S->sect_claimed_location = NULL;
    match_results mr = Regexp::create_mr();
    if (Regexp::match(&mr, titling, U"\"(%c+?)\" at \"(%c+)\" %^\"(%c+)\" *")) {
        WebStructure::name_section(S, mr.exp[0]);
        S->sect_claimed_location = Str::new();
        WRITE_TO(S->sect_claimed_location, "%S", at);
        if (Str::len(at) > 0) WRITE_TO(S->sect_claimed_location, "%c", FOLDER_SEPARATOR);
        WRITE_TO(S->sect_claimed_location, "%S", mr.exp[1]);
        S->tag_name = Str::duplicate(mr.exp[2]);
    } else if (Regexp::match(&mr, titling, U"\"(%c+?)\" at \"(%c+)\" *")) {
        /* bug fix: the quoted name was previously discarded on this branch,
           unlike the otherwise-identical tagged branch above */
        WebStructure::name_section(S, mr.exp[0]);
        S->sect_claimed_location = Str::new();
        WRITE_TO(S->sect_claimed_location, "%S", at);
        if (Str::len(at) > 0) WRITE_TO(S->sect_claimed_location, "%c", FOLDER_SEPARATOR);
        WRITE_TO(S->sect_claimed_location, "%S", mr.exp[1]);
    } else if (Regexp::match(&mr, titling, U"(%c+) %^\"(%c+)\" *")) {
        TEMPORARY_TEXT(name)
        WRITE_TO(name, "%S", at);
        if (Str::len(at) > 0) WRITE_TO(name, "%c", FOLDER_SEPARATOR);
        WRITE_TO(name, "%S", mr.exp[0]);
        WebStructure::name_section(S, name);
        DISCARD_TEXT(name)
        S->tag_name = Str::duplicate(mr.exp[1]);
    } else {
        TEMPORARY_TEXT(name)
        WRITE_TO(name, "%S", at);
        if (Str::len(at) > 0) WRITE_TO(name, "%c", FOLDER_SEPARATOR);
        WRITE_TO(name, "%S",titling);
        WebStructure::name_section(S, name);
        DISCARD_TEXT(name)
        S->tag_name = NULL;
    }
    Regexp::dispose_of(&mr);

    S->scratch_flag = FALSE;
    S->sect_extent = 0;

    S->weaving_ref = NULL;
    S->tangling_ref = NULL;
    S->analysis_ref = NULL;

    ADD_TO_LINKED_LIST(S, ls_section, C->sections);
    return S;
}

void WebStructure::name_section(ls_section *S, text_stream *name) {
    /* Set (or correct) the title of |S|. If the section already had a
       different title — other than the special name "All" — an error is
       issued before the new name is adopted. */
    if ((Str::len(S->sect_title) > 0) &&
        (Str::ne(S->sect_title, I"All")) &&
        (Str::ne(name, S->sect_title))) {
        ls_web *W = S->owning_chapter->owning_web;
        wcl_declaration *D = W->declaration;
        /* NOTE(review): |W->declaration| may be |NULL| (see |new_ls_web|), in
           which case |&(D->declaration_position)| dereferences NULL — confirm
           this error path is unreachable for declaration-less webs */
        TEMPORARY_TEXT(msg)
        WRITE_TO(msg, "section '%S' seems on closer inspection to be called '%S'",
            S->sect_title, name);
        WCL::error(D, &(D->declaration_position), msg);
        DISCARD_TEXT(msg)
    }
    S->sect_title = Str::duplicate(name);
}

int WebStructure::paragraph_count_within_section(ls_section *S) {
    /* The number of paragraphs in the literate source of |S|, or 0 if the
       section has not (yet) been read. */
    int n = 0;
    /* robustness fix: guard |literate_source|, as |WebStructure::paragraph_count| does */
    if ((S == NULL) || (S->literate_source == NULL)) return 0;
    for (ls_paragraph *par = S->literate_source->first_par; par; par = par->next_par)
        n++;
    return n;
}

§9. Woven and Tangled folders. We abstract these in order to be able to respond well to their not existing:

pathname *WebStructure::woven_folder(ls_web *W, int n) {
    /* Return the web's "Woven" subdirectory, creating it in the file system
       if necessary; failure to create it is a fatal error. */
    /* NOTE(review): the |n| parameter is unused here — presumably retained for
       call-compatibility; confirm with callers before removing */
    pathname *P = Pathnames::down(W->path_to_web, I"Woven");
    if (Pathnames::create_in_file_system(P) == FALSE)
        Errors::fatal_with_path("unable to create Woven subdirectory", P);
    return P;
}
pathname *WebStructure::tangled_folder(ls_web *W) {
    /* Return the web's "Tangled" subdirectory, creating it in the file system
       if necessary; failure to create it is a fatal error. */
    pathname *T = Pathnames::down(W->path_to_web, I"Tangled");
    if (Pathnames::create_in_file_system(T) == FALSE)
        Errors::fatal_with_path("unable to create Tangled subdirectory", T);
    return T;
}

§10. Contents page. The contents page for a large web is usually at a fixed leafname, so:

int WebStructure::directory_looks_like_a_web(pathname *P) {
    /* |TRUE| if the directory |P| holds a contents page under any of the
       three leafnames a web's contents page can have. */
    if ((TextFiles::exists(Filenames::in(P, I"Contents.w"))) ||
        (TextFiles::exists(Filenames::in(P, I"Contents.inweb"))) ||
        (TextFiles::exists(Filenames::in(P, I"Contents.inwebc")))) return TRUE;
    return FALSE;
}

§11. But mid-sized webs can consist more or less of an arbitrary file itself serving as contents page, so we won't assume it's always "Contents.w":

filename *WebStructure::contents_filename(ls_web *W) {
    /* The contents page of |W|, or |NULL| for a single-file web. */
    filename *F = W->contents_filename;
    return F;
}

§12. Reading from the file system. Webs can be stored in two ways: as a directory containing a multitude of files, in which case the pathname P is supplied; or as a single file with everything in one (and thus, implicitly, a single chapter and a single section), in which case a filename alt_F is supplied.

ls_web *WebStructure::parse_declaration(wcl_declaration *D) {
    /* Build the skeleton of a web from declaration |D|: either reconnoitre a
       single-file "page", or read a directory web's contents page; then check
       and complete the bibliographic data. */
    ls_web *W = WebStructure::new_ls_web(D);

    if (W->is_page)
        SingleFileWebs::reconnoiter(W);
    else
        WebContents::read_contents_page(W, W->main_module, TRUE, NULL);
    if (W->web_notation == NULL) internal_error("no notation for web");

    Bibliographic::check_required_data(W);
    BuildFiles::set_bibliographic_data_for(W);
    BuildFiles::deduce_semver(W); /* presumably derives |W->version_number| — see §1 */
    Conventions::establish(W, NULL); /* no colony known at this early stage */
    return W;
}

§13. Web reading. All of that ran very quickly, but now things will slow down. The next function is where the actual contents of a web are read -- which means opening each section and reading it line by line. We read the complete literate source of the web into memory, which is profligate, but saves time.

void WebStructure::read_web_source(ls_web *W, int verbosely, int with_internals) {
    /* Read the full literate source of every section of |W| into memory, vet
       holon usage, and locate the web's single definitions chunk (complaining
       if more than one is marked). Errors recorded en route are issued last. */
    /* NOTE(review): |with_internals| is unused in this function's body —
       confirm whether it is vestigial */
    ls_chapter *C;
    ls_section *S;
    LOOP_OVER_LINKED_LIST(C, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(S, ls_section, C->sections)
            Read one section from a file13.1;
    int dc = 0; /* how many definitions chunks have been seen so far */
    LOOP_OVER_LINKED_LIST(C, ls_chapter, W->chapters)
        LOOP_OVER_LINKED_LIST(S, ls_section, C->sections) {
            Holons::vet_usage(S->literate_source);
            for (ls_paragraph *par = S->literate_source->first_par; par; par = par->next_par)
                for (ls_chunk *chunk = par->first_chunk; chunk; chunk = chunk->next_chunk)
                    if (chunk->chunk_type == DEFINITIONS_HERE_LSCT) {
                        dc++;
                        if (dc > 1) { /* only the first such chunk is used */
                            WebErrors::record_at(
                                I"definitions position set for a second time",
                                chunk->first_line);
                        } else {
                            W->definitions_chunk = chunk;
                        }
                    }
        }
    WebErrors::issue_all_recorded(W);
    WCL::report_errors(W->declaration);
}

§13.1. Read one section from a file13.1 =

    /* Read the single section |S| of web |W|: its text comes either from the
       body of a WCL declaration already in memory, or from a source file. */
    pathname *P = W->path_to_web;
    ls_module *M = S->owning_chapter->owning_module;
    if ((M) && (M->module_location))
        P = M->module_location; /* references are relative to module */

    S->literate_source = LiterateSource::begin_unit(S, W->web_notation, WebStructure::section_language(S), P, W);

    if (W->is_page) Insert an implied purpose, for a single-file web13.1.1;

    int cl = 0; /* number of lines fed to the scanner */
    if (S->source_declaration_for_section) {
        /* the section's text is the body of a declaration held in memory */
        wcl_declaration *D = S->source_declaration_for_section;
        text_file_position tfp = D->body_position;
        if (S->source_file_for_section) tfp.text_file_filename = S->source_file_for_section;
        text_stream *L;
        LOOP_OVER_LINKED_LIST(L, text_stream, D->declaration_lines) {
            TEMPORARY_TEXT(line)
            Str::copy(line, L); /* scanning may truncate the line, so work on a copy */
            WebStructure::scan_source_line(line, &tfp, (void *) S);
            DISCARD_TEXT(line);
            tfp.line_count++; cl++;
        }
    } else {
        /* the section's text must be read from its source file */
        filename *F = S->source_file_for_section;
        if (F == NULL) internal_error("no source file");
        cl = TextFiles::read(F, FALSE, "can't open section file", TRUE,
                WebStructure::scan_source_line, NULL, (void *) S);
    }

    LiterateSource::complete_unit(S->literate_source);
    if (Str::len(S->literate_source->heading.operand1) > 0) {
        /* a heading found in the source gives the definitive section title */
        WebStructure::name_section(S, S->literate_source->heading.operand1);
        if (W->is_page) Bibliographic::set_datum(W, I"Title", S->sect_title);
    }
    if (verbosely) PRINT("Read section: '%S' (%d lines)\n", S->sect_title, cl);

§13.1.1. Insert an implied purpose, for a single-file web13.1.1 =

    /* A single-file web has no contents page, so its "Purpose" bibliographic
       datum, if given, is attached to its one section as the purpose text. */
    text_stream *purpose = Bibliographic::get_datum(W, I"Purpose");
    if (Str::len(purpose) > 0) LiterateSource::add_purpose(S->literate_source, NULL, purpose);

§14. Non-implied source lines come from here. Note that we assume here that trailing whitespace on a line is not significant in the language being tangled for.

void WebStructure::scan_source_line(text_stream *line, text_file_position *tfp, void *state) {
    /* Callback applied to each raw line of section source: trims trailing
       whitespace, feeds the line to the literate-source parser, and splices
       in any file named on an include line. |state| is the |ls_section|. */
    ls_section *S = (ls_section *) state;
    S->sect_extent++; /* counted even for lines in the skipped range */
    if ((S->skip_from > 0) && (S->skip_from <= tfp->line_count) && (tfp->line_count <= S->skip_to))
        return;
    int l = Str::len(line) - 1;
    while ((l>=0) && (Characters::is_space_or_tab(Str::get_at(line, l))))
        Str::truncate(line, l--); /* strip trailing spaces and tabs */
    ls_line *L = LiterateSource::feed_line(S->literate_source, tfp, line);
    if (L->classification.major == INCLUDE_FILE_MAJLC) {
        /* the included file is resolved relative to the including file's directory */
        filename *F = Filenames::from_text_relative(
            Filenames::up(tfp->text_file_filename), L->classification.operand1);
        if (TextFiles::exists(F)) {
            /* NOTE(review): recursive includes are not cycle-checked here —
               confirm that is guarded elsewhere */
            TextFiles::read(F, FALSE, "can't open included file", TRUE,
                WebStructure::scan_source_line, NULL, (void *) S);
        } else {
            Errors::fatal_with_file("include file not found", F);
        }
        /* once processed, the include line is reclassified as empty commentary */
        L->classification.major = COMMENTARY_MAJLC;
        L->classification.minor = NO_MINLC;
        L->classification.operand1 = Str::new();
    }
}

§15. Language. I'm probably showing my age here: the default language for a web is C.

void WebStructure::resolve_declaration(wcl_declaration *D) {
    /* Resolve the language names of the web, its chapters and its sections
       into |programming_language| objects; the default language is C. */
    ls_web *W = RETRIEVE_POINTER_ls_web(D->object_declared);
    text_stream *lname = Bibliographic::get_datum(W, I"Language");
    if (Str::len(lname) == 0) lname = I"C";
    W->web_language = Languages::find_or_fail(W, lname);
    ls_chapter *Ch; ls_section *Sn;
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters) {
        if (Str::len(Ch->ch_language_name) > 0)
            Ch->ch_language = Languages::find_or_fail(W, Ch->ch_language_name);
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            if (Str::len(Sn->sect_language_name) > 0)
                Sn->sect_language = Languages::find_or_fail(W, Sn->sect_language_name);
    }
    Conventions::set_level(D, WEB_LSCONVENTIONLEVEL);
}

programming_language *WebStructure::section_language(ls_section *S) {
    /* The language of |S|, falling back to its chapter's language. */
    programming_language *pl = S->sect_language;
    if (pl) return pl;
    return WebStructure::chapter_language(S->owning_chapter);
}

programming_language *WebStructure::chapter_language(ls_chapter *C) {
    /* The language of |C|, falling back to its web's language. */
    programming_language *pl = C->ch_language;
    if (pl) return pl;
    return WebStructure::web_language(C->owning_web);
}

programming_language *WebStructure::web_language(ls_web *W) {
    /* The prevailing language of the web as a whole. */
    programming_language *pl = W->web_language;
    return pl;
}

void WebStructure::set_language(ls_web *W, programming_language *pl) {
    /* Set the web's prevailing language, keeping the "Language" bibliographic
       datum in step with it. */
    text_stream *name = pl->language_name;
    Bibliographic::set_datum(W, I"Language", name);
    W->web_language = pl;
}

§16. Debugging. This is useful mainly for testing: it produces a verbose listing of everything in a web.

void WebStructure::write_web(OUTPUT_STREAM, ls_web *W, text_stream *range) {
    /* Write out the literate source of whatever |range| names: a single
       chapter, a single section, or failing either of those the entire web. */
    ls_chapter *Ch = WebRanges::to_chapter(W, range);
    ls_section *Sn;
    if (Ch) {
        LOOP_OVER_LINKED_LIST(Sn, ls_section, Ch->sections)
            LiterateSource::write_lsu(OUT, Sn->literate_source);
        return;
    }
    Sn = WebRanges::to_section(W, range);
    if (Sn) {
        LiterateSource::write_lsu(OUT, Sn->literate_source);
        return;
    }
    LOOP_OVER_LINKED_LIST(Ch, ls_chapter, W->chapters) {
        ls_section *S2;
        LOOP_OVER_LINKED_LIST(S2, ls_section, Ch->sections)
            LiterateSource::write_lsu(OUT, S2->literate_source);
    }
}