From ac5ed067cb6102153510e44fc3f0417340100ff7 Mon Sep 17 00:00:00 2001
From: Joe Corneli
Date: Fri, 12 May 2017 16:24:35 +0100
Subject: [PATCH] more clarifications

---
 org/arxana-redux-refs.bib |  38 +++++--
 org/arxana-redux.org      | 264 +++++++++++++++++++++++++---------------------
 2 files changed, 175 insertions(+), 127 deletions(-)

diff --git a/org/arxana-redux-refs.bib b/org/arxana-redux-refs.bib
index b706b28..7004f8a 100644
--- a/org/arxana-redux-refs.bib
+++ b/org/arxana-redux-refs.bib
@@ -8,6 +8,7 @@ publisher={MetaScholar Initiative at Emory University},
 pages={240--253},
 year={2005},
 url={http://metameso.org/~joe/docs/sbdm.html},
+keywords = {scholia, commons-based peer production}
 }
 
 @inproceedings{budzynska2014model,
@@ -25,22 +26,25 @@ url={http://metameso.org/~joe/docs/sbdm.html},
   url = {http://www.lrec-conf.org/proceedings/lrec2014/summaries/77.html},
   timestamp = {Fri, 26 Sep 2014 08:11:49 +0200},
   biburl = {http://dblp.uni-trier.de/rec/bib/conf/lrec/BudzynskaJRSSy14},
-  bibsource = {dblp computer science bibliography, http://dblp.org}
+  bibsource = {dblp computer science bibliography, http://dblp.org},
+  keywords = {inference anchoring theory, IAT}
 }
 
 @misc{corneli2017qand,
-title={Towards mathematical AI via a model of the content and process of mathematical dialogues},
+title={Towards mathematical {AI} via a model of the content and process of mathematical dialogues},
 author={Joseph Corneli and Ursula Martin and Dave Murray-Rust and Alison Pease},
 howpublished={Submitted to: \emph{Conferences on Intelligent Computer Mathematics 2017}},
+keywords = {knowledge representation and reasoning, IATC}
 }
 
 @article{benkler2002coase,
-  title={Coase's Penguin, or, Linux and" The Nature of the Firm"},
+  title={Coase's {Penguin}, or, {Linux} and {``}{The} {Nature} of the {Firm}{''}},
   author={Benkler, Yochai},
   journal={Yale Law Journal},
   pages={369--446},
   year={2002},
-  publisher={JSTOR}
+  publisher={JSTOR},
+  keywords = {commons-based peer production}
 }
 
 
@@ -50,7 +54,8 @@ howpublished={Submitted to: \emph{Conferences on Intelligent Computer Mathematic
 journal={D-Lib magazine},
 volume={9},
 number={10},
-year={2003}
+year={2003},
+keywords = {commons-based peer production, digital libraries}
 }
 
 @book{nelson1981literary,
@@ -67,6 +72,27 @@ title = {Lakatos-style Collaborative Mathematics through Dialectical, Structured
 journal = {Artificial Intelligence},
 volume = {246},
 pages = {181--219},
-year = {2017, to appear},
+year = {2017},
+month={May},
 addendum = {{\textbf{CORE: A*}}},
+keywords = {argumentation, mathematics}
+}
+
+@inproceedings{kaliszyk2014developing,
+  title={Developing corpus-based translation methods between informal and formal mathematics},
+  author={Kaliszyk, Cezary and Urban, Josef and Vysko{\v{c}}il, Ji{\v{r}}{\'\i} and Geuvers, Herman},
+  booktitle={International Conference on Intelligent Computer Mathematics},
+  pages={435--439},
+  year={2014},
+  organization={Springer},
+  url={http://cl-informatik.uibk.ac.at/cek/docs/14/ckjujvhg-cicm14-poster.pdf},
+  keywords = {corpus-based translation}
+}
+
+@misc{kaliszyk2014developing-misc,
+  title={Developing corpus-based translation methods between informal and formal mathematics [{P}oster of \cite{kaliszyk2014developing}]},
+  author={Kaliszyk, Cezary and Urban, Josef and Vysko{\v{c}}il, Ji{\v{r}}{\'\i} and Geuvers, Herman},
+  url={http://cl-informatik.uibk.ac.at/cek/docs/14/ckjujvhg-cicm14-poster.pdf},
+  note={\url{http://cl-informatik.uibk.ac.at/cek/docs/14/ckjujvhg-cicm14-poster.pdf}},
+  keywords = {corpus statistics}
 }
\ No newline at end of file
diff --git a/org/arxana-redux.org b/org/arxana-redux.org
index b7a0f0d..0086e62 100644
--- a/org/arxana-redux.org
+++ b/org/arxana-redux.org
@@ -15,35 +15,18 @@
 #+HTML_HEAD:
 #+BIBLIOGRAPHY: arxana-redux-refs plain
 #+LATEX_HEADER: \usepackage[backend=bibtex,sorting=none]{biblatex}
-#+LATEX_HEADER: \addbibresource{arxana-redux-refs} 
+#+LATEX_HEADER: \addbibresource{arxana-redux-refs}
 #+STARTUP: showeverything
 
-* Timeline
-
-Note: this could be more nicely organized
-using the org agenda.
-
-** Week 17th April
-*** Science March on Saturday
-** Week 24th April
-*** Met: Tuesday 25th April
-*** DONE Joe get this document under version control
-** Week 1 May
-*** Meet Monday 1 May (note: this is a Bank Holiday in the UK)
-** Week 8 May
-*** Meet Saturday May 13
-** Week 15 May
-** Week 22 May
-*** *Submit* 26th May
-** Week 29 May
-
 * Overview (Introduction)
 
-For the Scheme conference at Oxford this fall, we will be making
-a presentation on Arxana and math texts. We plan to get some
-version of /Arxana/ working, then use it to mark up some
-mathematical text along the lines of /IAT/ and then come up with
-a demo which shows what such a /scholiumific representation/ is
+For the Scheme 2017 workshop at the International Conference on
+Functional Programming, to take place at Oxford this Autumn, we will
+be making a presentation on Arxana and math texts.
+
+We plan to get some version of /Arxana/ working, then use it to mark
+up some mathematical text along the lines of /IAT/, and then come up
+with a demo which shows what such a /scholiumific representation/ is
 good for.
 
 ** Glossary of keywords
@@ -96,26 +79,26 @@ As we will see (*I should think! -JC*), even a proof itself is
 more conveniently represented by hypergraph structures. In
 particular, some of the ``predicates'' are really generative
 functions. Indeed, some of them are ``heuristics'' which take
-some work to specify. 
+some work to specify.
 
 Another feature of our representation is that hypergraphs can be
 placed inside nodes and links of other hypergraphs. This allows
-one to organize ther representation by levels of generality. For 
+one to organize the representation by levels of generality. For
 instance, at the highest level, there might be the structure of
 statements and proofs of theorems. At the next level, there
 would be the individual propositions and the argument structure.
 At the lowest level, there would be the grammatical structure of
-the propositions. Having a way of ``coarse-graining'' the 
+the propositions. Having a way of ``coarse-graining'' the
 representation is important in order to have it be manageable and
 allow users to zoom in and out of the map to find the appropriate
 level of detail. This is related to Fare's categorical theory of
 levels of a computer system [add citation].
 
-One effect of this is that the markup we are devising is ``progressive''
-in the sense that we can describe heuristics with nodes without fully
-formalising them. A corresponding user feature is to be able to fold
-things at many levels. This is important because the diagrams get
-quite large.
+One effect of this is that the markup we are devising is
+``progressive'' in the sense that we can describe heuristics with
+nodes without fully formalising them. A corresponding user feature is
+to be able to fold things at many levels. This is important because
+the diagrams get quite large.
 
 *** Proofs and prologues
 
@@ -149,7 +132,7 @@ programming languages and long-running processes, e.g., MAXIMA.
 *** What is on offer in this approach
 
-One thing we have here is links that contain code. 
+One thing we have here is links that contain code.
 
 Another thing hypergraphs give us is links to links - of course
 we can also think about higher-dimensional versions of the same,
 but links to links.
 
 * The core language
 
-Here we'll describe the structures coming from several different places,
-e.g., Gabi, Lakatos, etc.
+Here we'll describe the structures coming from several different
+places, e.g., Gabi, Lakatos, etc.
 
-** Content (=structure=)                                              :GRAY:
-*** object $O$ is used in proposition $P$
+** Content (=structure=)                                              :GRAY:
+*** object $O$ is =used_in= proposition $P$
 **** Example: ``The Cayley graph of group $G$'' has sub-object $G$
-*** proposition $Q$ is a sub-proposition of proposition $P$
+*** proposition $Q$ is a =sub-proposition= of proposition $P$
 **** Example: ``$P$ is difficult to prove'' has sub-proposition $P$
-*** proposition $R$ is a reformed version of proposition $P$
+*** proposition $R$ is a =reformed= version of proposition $P$
 **** Example: ``Every $x$ is a $P$'' might be reformed to ``Some $x$ is a $P$''
-*** property $p$ is extensionally regarded as set of all objects with property $p$
-**** Example: primes
-*** $O$ instantiates $C$
+*** property $p$ is regarded as the =extensional_set= of all objects with property $p$
+**** Example: the set $\mathcal{P}$ of primes is the extension of the predicate $\mathit{prime}$.
+*** $O$ =instantiates= some class $C$
 **** Comment: This is not quite syntactic sugar for =has_property=!
 If we have a class of items $C$ we want to be able to say that $x$ is
 a member of $C$. In principle, we could write $x$ =has_property= $C$.
@@ -181,13 +164,13 @@ backquote; so e.g., we could write the true statement $prime(4)$
 =instantiates= $prime(x)$, even though $prime(4)$ is false.
 **** Comment: Perhaps we need a further distinction between objects and 'schemas'
 So we might write =has_property(member_of(x,primes))= to talk about
-class membership. Presumably (except in Russell paradox situations) 
+class membership. Presumably (except in Russell paradox situations)
 this is the same as =has_property(x,prime)=, or whatever.
-*** $s$ expands $y$
+*** $s$ =expands= $y$
 **** Comment: This is something that a computer algebra system like MAXIMA could confirm
-*** $s$ sums $y$
+*** $s$ =sums= $y$
 **** Comment: This is something that a computer algebra system like MAXIMA could confirm
-*** $s$ contains as summand $t$
+*** $s$ =contains as summand= $t$
 **** Comment: This is more specific than the =used_in= relation
 This relation is interesting, for example, in the case of sums
 like $s+t$ where $t$ is, or might be, ``small''. Note that there
@@ -198,15 +181,15 @@ relation that loses the distinction between all of these.
 However, generalising in this way does open a can of worms for
 addressing. I.e., do we need XPath or something like that for
 indexing into something deeply nested?
-*** $M$ generalises $m$
+*** $M$ =generalises= $m$
 **** Comment: Here, content relationships between methods. Does this work?
 Instead of talking about content relationships 'at the content
 level' we are now interested in assertions like "binomial
 theorem" =generalises= "eliminate cross terms", where both of the
 quoted concepts are heuristics, not 'predicates'
 *** Proposition vs Object vs Binder vs Inference rule?
 
 Note that there are different kinds of 'content'. If for example I
 say $0
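
The relation vocabulary sketched in the patch above (=used_in=, =sub-proposition=, =reformed=, =instantiates=, =expands=, =sums=, =contains as summand=, =generalises=) can be prototyped as plain triples long before any of it is formalised. The block below is a minimal, hypothetical Emacs Lisp sketch, not part of Arxana or of this patch; the names =arxana-demo-triples= and =arxana-demo-query= are made up for illustration.

#+BEGIN_SRC emacs-lisp
  (require 'seq)

  (defvar arxana-demo-triples
    '((used_in          G                  "the Cayley graph of G")
      (sub-proposition  P                  "P is difficult to prove")
      (reformed         "Some x is a P"    "Every x is a P")
      (instantiates     "prime(4)"         "prime(x)")
      (generalises      "binomial theorem" "eliminate cross terms"))
    "Toy knowledge base of (RELATION SUBJECT OBJECT) triples.")

  (defun arxana-demo-query (relation triples)
    "Return the (SUBJECT OBJECT) pairs linked by RELATION in TRIPLES."
    (mapcar #'cdr
            (seq-filter (lambda (triple) (eq (car triple) relation))
                        triples)))

  ;; Example query:
  (arxana-demo-query 'instantiates arxana-demo-triples)
  ;; => (("prime(4)" "prime(x)"))
#+END_SRC

Links to links and links that contain code would extend the same shape: a triple whose subject or object is itself another triple, or whose object is a piece of code to be evaluated.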
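Similarly, the idea of placing hypergraphs inside the nodes and links of other hypergraphs, so that readers can zoom between the theorem/proof level, the argument level, and the grammatical level, can be illustrated with a small sketch. Again this is hypothetical Emacs Lisp rather than Arxana's actual data model; the structure name =nema= and its slots are assumptions made for the example.

#+BEGIN_SRC emacs-lisp
  (require 'cl-lib)

  ;; A node or link with an optional CONTENT slot that can hold a whole
  ;; sub-hypergraph (a list of further nemas).
  (cl-defstruct (nema (:constructor make-nema
                                    (label &optional source sink content)))
    label source sink content)

  ;; Lowest level: grammatical structure of one proposition.
  (defvar arxana-demo-grammar
    (list (make-nema "NP: the Cayley graph of G")
          (make-nema "VP: is connected")))

  ;; Middle level: a proposition node whose CONTENT is the grammar graph.
  (defvar arxana-demo-proposition
    (make-nema "P1: the Cayley graph of G is connected" nil nil
               arxana-demo-grammar))

  ;; Top level: a "proof-of" link whose CONTENT holds the argument
  ;; structure, here just a list of propositions.
  (defvar arxana-demo-proof
    (make-nema "proof-of"
               (make-nema "Proof of Theorem 1") ; source
               (make-nema "Theorem 1")          ; sink
               (list arxana-demo-proposition)))

  ;; Zooming in is just descending into the CONTENT slot.
  (defun arxana-demo-zoom (nema)
    "Return the hypergraph stored inside NEMA, if any."
    (nema-content nema))

  (mapcar #'nema-label (arxana-demo-zoom arxana-demo-proof))
  ;; => ("P1: the Cayley graph of G is connected")
#+END_SRC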