Commit a579751b authored by Carlos Garnacho

docs: Unify ontology documentation tools

There is now a single tool that generates both the general ontology
documentation and the per-class/property documentation.
parent 2a198496
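In practice, the per-ontology documentation is now produced by a single ttl2sgml run pointed at an ontology directory and an output directory, roughly like this (a minimal sketch; the paths are illustrative, the real ones are set by the build rules below):

    # Hypothetical manual run; the Makefile.am/meson rules below pass the equivalent -d/-o options.
    docs/tools/ttl2sgml -d src/ontologies/nepomuk -o docs/ontologies/xml/

The tool scans the directory for *.description files, writes one <prefix>-ontology.xml per ontology, and also emits the per-class/property documents that the separate ttlresource2sgml binary used to generate.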
@@ -13,7 +13,7 @@ ONTOLOGY_INTRODUCTIONS = \
 # Generation of the ontology XML files.
 gen-doc.stamp: $(ONTOLOGY_INTRODUCTIONS)
 	$(MKDIR_P) xml
-	$(top_srcdir)/docs/tools/gen-doc.sh $(top_builddir)/docs/tools/ttl2sgml $(top_builddir)/docs/tools/ttlresource2sgml $(top_srcdir)/src/ontologies/nepomuk xml/
+	$(top_builddir)/docs/tools/ttl2sgml -d $(top_srcdir)/src/ontologies/nepomuk -o xml/
 	$(AM_V_GEN) touch $@
 version.xml: gen-doc.stamp
...
@@ -13,14 +13,12 @@ version_xml = configure_file(input: 'version.xml.in',
 # -e $ONTOLOGIES_INFO_DIR/$PREFIX/explanation.xml
-gen_doc = meson.source_root() + '/docs/tools/gen-doc.sh'
 generated = custom_target('ontology-doc-generated',
     output: 'gen-doc.stamp',
-    command: [gen_doc, ttl2sgml, ttlresource2sgml,
-              meson.source_root() + '/src/ontologies',
-              meson.source_root() + '/docs/ontologies',
-              join_paths(meson.current_build_dir(), 'xml/')],
-    depends: [ttl2sgml, ttlresource2sgml],
+    command: [ttl2sgml,
+              '-d', meson.source_root() + '/src/ontologies/nepomuk',
+              '-o', join_paths(meson.current_build_dir(), 'xml/')],
+    depends: ttl2sgml,
     # FIXME: this shouldn't be necessary, but currently the 'dependencies'
     # parameter to gnome.gtkdoc() doesn't actually trigger building of custom
     # targets.
...
@@ -53,6 +53,6 @@
   <xi:include href="xml/slo-ontology.xml" />
   <xi:include href="xml/tracker-ontology.xml" />
   <xi:include href="xml/maemo-ontology.xml" />
-  <xi:include href="xml/libosinfo-ontology.xml" />
+  <xi:include href="xml/osinfo-ontology.xml" />
 </part>
 </book>
-noinst_PROGRAMS = ttl2sgml ttlresource2sgml
+noinst_PROGRAMS = ttl2sgml
 AM_CPPFLAGS = \
 	$(BUILD_CFLAGS) \
@@ -24,12 +24,9 @@ TTL_LOADER_FILES = \
 ttl2sgml_SOURCES = \
 	$(TTL_LOADER_FILES) \
+	ttlresource2sgml.c \
+	ttlresource2sgml.h \
 	ttl2sgml.c
-ttlresource2sgml_SOURCES = \
-	$(TTL_LOADER_FILES) \
-	ttlresource2sgml.c
 EXTRA_DIST = \
-	gen-doc.sh \
 	meson.build
#!/usr/bin/env bash
#
# Generates the SGML documentation from a TTL description
# Copyright (C) 2009, Nokia <ivan.frade@nokia.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
set -e
if [ $# -lt 5 ]; then
echo "Insufficient arguments provided"
echo "Usage: $0 <ttl2sgml> <ttlres2sgml> <ontology-data-dir> <build-dir>"
exit 1;
fi
TTL2SGML=$1
TTLRES2SGML=$2
ONTOLOGIES_DATA_DIR=$3
BUILD_DIR=$4
if [ ! -e $BUILD_DIR ]; then
mkdir -p $BUILD_DIR
fi
$TTLRES2SGML -d $ONTOLOGIES_DATA_DIR -o $BUILD_DIR
for f in `find $ONTOLOGIES_DATA_DIR -name "*.description"` ; do
# ../../src/ontologies/XX-aaa.description -> PREFIX=aaa
TMPNAME=${f%.description}
PREFIX=${TMPNAME#*-}
$TTL2SGML -d $f -o $BUILD_DIR/$PREFIX-ontology.xml
done
echo "Done"
@@ -7,11 +7,6 @@ ttl_loader_files = [
 ]
 ttl2sgml = executable('ttl2sgml',
-    ttl_loader_files, 'ttl2sgml.c',
-    dependencies: [tracker_data_dep, tracker_sparql_dep],
-    c_args: ['-DTRACKER_COMPILATION'])
-
-ttlresource2sgml = executable('ttlresource2sgml',
-    ttl_loader_files, 'ttlresource2sgml.c',
+    ttl_loader_files, 'ttl2sgml.c', 'ttlresource2sgml.c',
     dependencies: [tracker_data_dep, tracker_sparql_dep],
     c_args: ['-DTRACKER_COMPILATION'])
@@ -24,31 +24,85 @@
 #include "ttl_loader.h"
 #include "ttl_model.h"
 #include "ttl_sgml.h"
+#include "ttlresource2sgml.h"

-static gchar *desc_file = NULL;
-static gchar *output_file = NULL;
+static gchar *ontology_dir = NULL;
+static gchar *output_dir = NULL;

 static GOptionEntry entries[] = {
-    { "desc", 'd', 0, G_OPTION_ARG_FILENAME, &desc_file,
-      "TTL file with the ontology description and documentation",
+    { "ontology-dir", 'd', 0, G_OPTION_ARG_FILENAME, &ontology_dir,
+      "Ontology directory",
       NULL
     },
-    { "output", 'o', 0, G_OPTION_ARG_FILENAME, &output_file,
+    { "output-dir", 'o', 0, G_OPTION_ARG_FILENAME, &output_dir,
       "File to write the output (default stdout)",
       NULL
     },
     { NULL }
 };

+static gint
+compare_files (gconstpointer a,
+               gconstpointer b)
+{
+    const GFile *file_a = a, *file_b = b;
+    gchar *basename_a, *basename_b;
+    gint res;
+
+    basename_a = g_file_get_basename ((GFile*) file_a);
+    basename_b = g_file_get_basename ((GFile*) file_b);
+    res = strcmp (basename_a, basename_b);
+    g_free (basename_a);
+    g_free (basename_b);
+
+    return res;
+}
+
+static GList *
+get_description_files (GFile *dir)
+{
+    GFileEnumerator *enumerator;
+    GFileInfo *info;
+    GFile *desc_file;
+    GList *files;
+    const gchar *name;
+
+    enumerator = g_file_enumerate_children (dir,
+                                            G_FILE_ATTRIBUTE_STANDARD_NAME,
+                                            G_FILE_QUERY_INFO_NONE,
+                                            NULL, NULL);
+    if (!enumerator) {
+        return NULL;
+    }
+
+    files = NULL;
+
+    while ((info = g_file_enumerator_next_file (enumerator, NULL, NULL)) != NULL) {
+        name = g_file_info_get_name (info);
+
+        if (g_str_has_suffix (name, ".description")) {
+            desc_file = g_file_enumerator_get_child (enumerator, info);
+            files = g_list_insert_sorted (files, desc_file, compare_files);
+        }
+
+        g_object_unref (info);
+    }
+
+    g_object_unref (enumerator);
+
+    return files;
+}
+
 gint
 main (gint argc, gchar **argv)
 {
     GOptionContext *context;
     Ontology *ontology = NULL;
     OntologyDescription *description = NULL;
-    gchar *ttl_file = NULL;
-    gchar *dirname = NULL;
-    FILE *f = NULL;
+    GList *description_files, *l;
+    GFile *ontology_file, *output_file;

     /* Translators: this messagge will apper immediately after the */
     /* usage string - Usage: COMMAND [OPTION]... <THIS_MESSAGE> */
@@ -59,11 +113,11 @@ main (gint argc, gchar **argv)
     g_option_context_add_main_entries (context, entries, NULL);
     g_option_context_parse (context, &argc, &argv, NULL);

-    if (!desc_file) {
+    if (!ontology_dir || !output_dir) {
         gchar *help;

         g_printerr ("%s\n\n",
-                    "Description file is mandatory");
+                    "Ontology and output dirs are mandatory");

         help = g_option_context_get_help (context, TRUE, NULL);
         g_option_context_free (context);
@@ -73,32 +127,44 @@ main (gint argc, gchar **argv)
         return -1;
     }

-    if (output_file) {
-        f = fopen (output_file, "w");
-    } else {
-        f = stdout;
+    ontology_file = g_file_new_for_path (ontology_dir);
+    output_file = g_file_new_for_path (output_dir);
+    description_files = get_description_files (ontology_file);
+
+    if (!description_files) {
+        g_printerr ("Ontology description files not found in dir\n");
+        return -1;
     }
-    g_assert (f != NULL);
-    description = ttl_loader_load_description (desc_file);
-    dirname = g_path_get_dirname (desc_file);
-    ttl_file = g_build_filename (dirname,
-                                 description->relativePath,
-                                 NULL);
-    ontology = ttl_loader_load_ontology (ttl_file);
-    g_free (ttl_file);
-    g_free (dirname);
-    ttl_sgml_print (description, ontology, f);
-    ttl_loader_free_ontology (ontology);
-    ttl_loader_free_description (description);
-    g_option_context_free (context);
-    fclose (f);
+    ontology = ttl_loader_new_ontology ();
+
+    for (l = description_files; l; l = l->next) {
+        Ontology *file_ontology = NULL;
+        GFile *ttl_file, *ttl_output_file;
+        gchar *filename;
+
+        description = ttl_loader_load_description (l->data);
+        ttl_file = g_file_get_child (ontology_file, description->relativePath);
+
+        filename = g_strdup_printf ("%s-ontology.xml", description->localPrefix);
+        ttl_output_file = g_file_get_child (output_file, filename);
+        g_free (filename);
+
+        file_ontology = ttl_loader_new_ontology ();
+
+        ttl_loader_load_ontology (ontology, ttl_file);
+        ttl_loader_load_ontology (file_ontology, ttl_file);
+        ttl_loader_load_prefix_from_description (ontology, description);
+        ttl_sgml_print (description, file_ontology, ttl_output_file);
+
+        ttl_loader_free_ontology (file_ontology);
+        ttl_loader_free_description (description);
+    }
+
+    generate_ontology_class_docs (ontology, output_file);
+
+    g_option_context_free (context);

     return 0;
 }
@@ -336,13 +336,11 @@ load_description (OntologyDescription *desc,
     }
 }

 Ontology *
-ttl_loader_load_ontology (const gchar *ttl_file)
+ttl_loader_new_ontology (void)
 {
     Ontology *ontology;

-    g_debug ("Loading ontology... %s\n", ttl_file);
-
     ontology = g_new0 (Ontology, 1);
     ontology->classes = g_hash_table_new_full (g_str_hash,
                                                g_str_equal,
@@ -356,138 +354,55 @@ ttl_loader_load_ontology (const gchar *ttl_file)
     ontology->prefixes = g_hash_table_new_full (g_str_hash,
                                                 g_str_equal,
                                                 g_free, g_free);
-    if (ttl_file) {
-        TrackerTurtleReader *reader;
-        GError *error = NULL;
-        GFile *file = g_file_new_for_path (ttl_file);
-        reader = tracker_turtle_reader_new (file, NULL);
-        g_object_unref (file);
-        while (error == NULL && tracker_turtle_reader_next (reader, &error)) {
-            load_in_memory (ontology,
-                            tracker_turtle_reader_get_subject (reader),
-                            tracker_turtle_reader_get_predicate (reader),
-                            tracker_turtle_reader_get_object (reader));
-        }
-        g_object_unref (reader);
-        if (error) {
-            g_message ("Turtle parse error: %s", error->message);
-            g_error_free (error);
-        }
-    } else {
-        g_warning ("Unable to open '%s'", ttl_file);
-    }

     return ontology;
 }

-static GList *
-get_ontology_files (GFile *dir)
+void
+ttl_loader_load_ontology (Ontology *ontology,
+                          GFile *ttl_file)
 {
-    GFileEnumerator *enumerator;
-    GFileInfo *info;
-    GList *files;
-    const gchar *name;
-    enumerator = g_file_enumerate_children (dir,
-                                            G_FILE_ATTRIBUTE_STANDARD_NAME,
-                                            G_FILE_QUERY_INFO_NONE,
-                                            NULL, NULL);
-    if (!enumerator) {
-        return NULL;
-    }
-    files = NULL;
-    while ((info = g_file_enumerator_next_file (enumerator, NULL, NULL)) != NULL) {
-        name = g_file_info_get_name (info);
-        if (g_str_has_suffix (name, ".ontology")) {
-            files = g_list_insert_sorted (files, g_strdup (name),
-                                          (GCompareFunc) g_strcmp0);
-        }
-        g_object_unref (info);
+    TrackerTurtleReader *reader;
+    GError *error = NULL;
+
+    g_return_if_fail (G_IS_FILE (ttl_file));
+
+    reader = tracker_turtle_reader_new (ttl_file, NULL);
+
+    while (error == NULL && tracker_turtle_reader_next (reader, &error)) {
+        load_in_memory (ontology,
+                        tracker_turtle_reader_get_subject (reader),
+                        tracker_turtle_reader_get_predicate (reader),
+                        tracker_turtle_reader_get_object (reader));
     }
-    g_object_unref (enumerator);
-    return files;
+    g_object_unref (reader);
+
+    if (error) {
+        g_message ("Turtle parse error: %s", error->message);
+        g_error_free (error);
+    }
 }

-Ontology *
-ttl_loader_load_ontology_dir (const gchar *ttl_dir)
+void
+ttl_loader_load_prefix_from_description (Ontology *ontology,
+                                         OntologyDescription *description)
 {
-    GFile *dir = g_file_new_for_path (ttl_dir);
-    Ontology *ontology;
-    GList *files, *f;
-    ontology = g_new0 (Ontology, 1);
-    ontology->classes = g_hash_table_new_full (g_str_hash,
-                                               g_str_equal,
-                                               g_free,
-                                               (GDestroyNotify)ttl_model_class_free);
-    ontology->properties = g_hash_table_new_full (g_str_hash,
-                                                  g_str_equal,
-                                                  g_free,
-                                                  (GDestroyNotify)ttl_model_property_free);
-    ontology->prefixes = g_hash_table_new_full (g_str_hash,
-                                                g_str_equal,
-                                                g_free, g_free);
-    files = get_ontology_files (dir);
-    g_object_unref (dir);
-    for (f = files; f; f = f->next) {
-        TrackerTurtleReader *reader;
-        GError *error = NULL;
-        gchar *ttl_file;
-        GFile *file;
-        ttl_file = g_build_filename (ttl_dir, f->data, NULL);
-        file = g_file_new_for_path (ttl_file);
-        reader = tracker_turtle_reader_new (file, NULL);
-        g_object_unref (file);
-        g_free (ttl_file);
-        while (error == NULL && tracker_turtle_reader_next (reader, &error)) {
-            load_in_memory (ontology,
-                            tracker_turtle_reader_get_subject (reader),
-                            tracker_turtle_reader_get_predicate (reader),
-                            tracker_turtle_reader_get_object (reader));
-        }
-        g_object_unref (reader);
-        if (error) {
-            g_message ("Turtle parser error: %s", error->message);
-            g_error_free (error);
-            break;
-        }
+    if (!g_hash_table_lookup (ontology->prefixes, description->baseUrl)) {
+        g_hash_table_insert (ontology->prefixes,
+                             g_strdup (description->baseUrl),
+                             g_strdup (description->localPrefix));
     }
-    return ontology;
 }

 OntologyDescription *
-ttl_loader_load_description (const gchar *filename)
+ttl_loader_load_description (GFile *file)
 {
     OntologyDescription *desc;
     TrackerTurtleReader *reader;
     GError *error = NULL;
-    GFile *file;

     desc = ttl_model_description_new ();
-    file = g_file_new_for_path (filename);
     reader = tracker_turtle_reader_new (file, NULL);
-    g_object_unref (file);

     while (error == NULL && tracker_turtle_reader_next (reader, &error)) {
         load_description (desc,
...
@@ -21,16 +21,19 @@
 #define __TTL_LOADER_H__

 #include <glib.h>
+#include <gio/gio.h>

 #include "ttl_model.h"

 G_BEGIN_DECLS

-void ttl_loader_init (void);
-void ttl_loader_shutdown (void);
-Ontology * ttl_loader_load_ontology (const gchar *filename);
-Ontology * ttl_loader_load_ontology_dir(const gchar *dir);
-OntologyDescription * ttl_loader_load_description (const gchar *filename);
+Ontology * ttl_loader_new_ontology (void);
+void ttl_loader_load_ontology (Ontology *ontology,
+                               GFile *filename);
+OntologyDescription * ttl_loader_load_description (GFile *filename);
+void ttl_loader_load_prefix_from_description (Ontology *ontology,
+                                              OntologyDescription *description);

 void ttl_loader_free_ontology (Ontology *ontology);
 void ttl_loader_free_description (OntologyDescription *desc);
...
@@ -196,12 +196,17 @@ print_ontology_class (Ontology *ontology,
 void
 ttl_sgml_print (OntologyDescription *description,
                 Ontology *ontology,
-                FILE *f)
+                GFile *file)
 {
     GHashTableIter iter;
-    gchar *upper_name;
+    gchar *upper_name, *path;
     OntologyClass *def;
+    FILE *f;
+
+    path = g_file_get_path (file);
+    f = fopen (path, "w");
+    g_assert (f != NULL);

     upper_name = g_ascii_strup (description->localPrefix, -1);
     print_sgml_header (f, description);
@@ -220,5 +225,6 @@ ttl_sgml_print (OntologyDescription *description,
     g_fprintf (f, "</section>\n");
     print_sgml_footer (f);
     g_free (upper_name);
+    fclose (f);
 }
@@ -27,8 +27,8 @@
 G_BEGIN_DECLS

 void ttl_sgml_print (OntologyDescription *description,
                      Ontology *ontology,
-                     FILE *output);
+                     GFile *file);

 G_END_DECLS
...
@@ -26,24 +26,10 @@
 #include "ttl_loader.h"
 #include "ttl_model.h"
 #include "ttl_sgml.h"
+#include "ttlresource2sgml.h"

-static gchar *ontology_dir = NULL;
-static gchar *output_dir = NULL;
 #define TRACKER_ONTOLOGY_CLASS "http://www.tracker-project.org/ontologies/tracker#Ontology"

-static GOptionEntry entries[] = {
-    { "ontology-dir", 'd', 0, G_OPTION_ARG_FILENAME, &ontology_dir,
-      "Ontology directory",
-      NULL
-    },
-    { "output-dir", 'o', 0, G_OPTION_ARG_FILENAME, &output_dir,
-      "File to write the output (default stdout)",
-      NULL
-    },
-    { NULL }
-};
 static gchar *
 name_get_prefix (Ontology *ontology,
                  const gchar *name)
@@ -94,20 +80,25 @@ name_to_shortname (Ontology *ontology,
 static void
 class_get_parent_hierarchy (Ontology *ontology,
-                            OntologyClass *klass,
+                            const gchar *class_name,
                             GList **list)
 {
+    OntologyClass *klass;
     GList *l;

     /* Ensure we only got the same class there once */
-    *list = g_list_remove (*list, klass->classname);
-    *list = g_list_prepend (*list, klass->classname);
+    *list = g_list_remove (*list, (gpointer) class_name);
+    *list = g_list_prepend (*list, (gpointer) class_name);

-    for (l = klass->superclasses; l; l = l->next) {
-        OntologyClass *parent_class;
-        parent_class = g_hash_table_lookup (ontology->classes, l->data);
-        class_get_parent_hierarchy (ontology, parent_class, list);
+    klass = g_hash_table_lookup (ontology->classes, class_name);
+    if (!klass) {
+        klass = ttl_model_class_new (class_name);
+        g_hash_table_insert (ontology->classes, klass->classname, klass);
+        return;
+    }
+
+    for (l = klass->superclasses; l; l = l->next) {
+        class_get_parent_hierarchy (ontology, l->data, list);
     }
 }
@@ -126,7 +117,7 @@ class_get_hierarchy (Ontology *ontology,
         hierarchy = g_list_prepend (hierarchy, l->data);
     }

-    class_get_parent_hierarchy (ontology, klass, &hierarchy);
+    class_get_parent_hierarchy (ontology, klass->classname, &hierarchy);

     return hierarchy;
 }
@@ -676,6 +667,15 @@ print_properties (FILE *f,
         OntologyClass *cl;

         superprop = g_hash_table_lookup (ontology->properties, l->data);
+        if (!superprop) {
+            superprop = ttl_model_property_new (l->data);
+            g_hash_table_insert (ontology->properties, superprop->propertyname, superprop);
+        }
+
+        if (!superprop->domain)
+            continue;