#!/usr/bin/perl -w

use warnings;
use strict;
use Text::Wrap;

$Text::Wrap::huge = 'overflow';

my $projectfullname = 'Simple Directmedia Layer';
my $projectshortname = 'SDL';
my $wikisubdir = '';
my $incsubdir = 'include';
my $readmesubdir = undef;
my $apiprefixregex = undef;
my $versionfname = 'include/SDL_version.h';
my $versionmajorregex = '\A\#define\s+SDL_MAJOR_VERSION\s+(\d+)\Z';
my $versionminorregex = '\A\#define\s+SDL_MINOR_VERSION\s+(\d+)\Z';
my $versionpatchregex = '\A\#define\s+SDL_PATCHLEVEL\s+(\d+)\Z';
my $mainincludefname = 'SDL.h';
my $selectheaderregex = '\ASDL.*?\.h\Z';
my $projecturl = 'https://libsdl.org/';
my $wikiurl = 'https://wiki.libsdl.org';
my $bugreporturl = 'https://github.com/libsdl-org/sdlwiki/issues/new';
my $srcpath = undef;
my $wikipath = undef;
my $wikireadmesubdir = 'README';
my $warn_about_missing = 0;
my $copy_direction = 0;
my $optionsfname = undef;
my $wikipreamble = undef;
my $changeformat = undef;
foreach (@ARGV) {
    $warn_about_missing = 1, next if $_ eq '--warn-about-missing';
    $copy_direction = 1, next if $_ eq '--copy-to-headers';
    $copy_direction = 1, next if $_ eq '--copy-to-header';
    $copy_direction = -1, next if $_ eq '--copy-to-wiki';
    $copy_direction = -2, next if $_ eq '--copy-to-manpages';
    if (/\A--options=(.*)\Z/) {
        $optionsfname = $1;
        next;
    } elsif (/\A--changeformat=(.*)\Z/) {
        $changeformat = $1;
        next;
    }
    $srcpath = $_, next if not defined $srcpath;
    $wikipath = $_, next if not defined $wikipath;
}
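
# Settings can also come from a plain key=value options file (by default,
# .wikiheaders-options at the top of the source checkout); values found there
# override the defaults assigned above. A hypothetical example file (the keys
# are the ones parsed below; the values are illustrative only):
#
#   projectfullname=My Project
#   projectshortname=MYPROJ
#   incsubdir=include
#   apiprefixregex=MYPROJ_
#   wikipreamble=(This is the documentation for MYPROJ.)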
my $default_optionsfname = '.wikiheaders-options';
$default_optionsfname = "$srcpath/$default_optionsfname" if defined $srcpath;

if ((not defined $optionsfname) && (-f $default_optionsfname)) {
    $optionsfname = $default_optionsfname;
}

if (defined $optionsfname) {
    open OPTIONS, '<', $optionsfname or die("Failed to open options file '$optionsfname': $!\n");
    while (<OPTIONS>) {
        chomp;
        if (/\A(.*?)\=(.*)\Z/) {
            my $key = $1;
            my $val = $2;
            $key =~ s/\A\s+//;
            $key =~ s/\s+\Z//;
            $val =~ s/\A\s+//;
            $val =~ s/\s+\Z//;
            $warn_about_missing = int($val), next if $key eq 'warn_about_missing';
            $srcpath = $val, next if $key eq 'srcpath';
            $wikipath = $val, next if $key eq 'wikipath';
            $apiprefixregex = $val, next if $key eq 'apiprefixregex';
            $projectfullname = $val, next if $key eq 'projectfullname';
            $projectshortname = $val, next if $key eq 'projectshortname';
            $wikisubdir = $val, next if $key eq 'wikisubdir';
            $incsubdir = $val, next if $key eq 'incsubdir';
            $readmesubdir = $val, next if $key eq 'readmesubdir';
            $versionmajorregex = $val, next if $key eq 'versionmajorregex';
            $versionminorregex = $val, next if $key eq 'versionminorregex';
            $versionpatchregex = $val, next if $key eq 'versionpatchregex';
            $versionfname = $val, next if $key eq 'versionfname';
            $mainincludefname = $val, next if $key eq 'mainincludefname';
            $selectheaderregex = $val, next if $key eq 'selectheaderregex';
            $projecturl = $val, next if $key eq 'projecturl';
            $wikiurl = $val, next if $key eq 'wikiurl';
            $bugreporturl = $val, next if $key eq 'bugreporturl';
            $wikipreamble = $val, next if $key eq 'wikipreamble';
        }
    }
    close(OPTIONS);
}
my $wordwrap_mode = 'mediawiki';

sub wordwrap_atom {  # don't call this directly.
    my $str = shift;
    my $retval = '';

    # wordwrap but leave links intact, even if they overflow.
    if ($wordwrap_mode eq 'mediawiki') {
        while ($str =~ s/(.*?)\s*(\[https?\:\/\/.*?\s+.*?\])\s*//ms) {
            $retval .= fill('', '', $1);  # wrap it.
            $retval .= "\n$2\n";  # don't wrap it.
        }
    } elsif ($wordwrap_mode eq 'md') {
        while ($str =~ s/(.*?)\s*(\[.*?\]\(https?\:\/\/.*?\))\s*//ms) {
            $retval .= fill('', '', $1);  # wrap it.
            $retval .= "\n$2\n";  # don't wrap it.
        }
    }

    return $retval . fill('', '', $str);
}
sub wordwrap_with_bullet_indent {  # don't call this directly.
    my $bullet = shift;
    my $str = shift;
    my $retval = '';

    #print("WORDWRAP BULLET ('$bullet'):\n\n$str\n\n");

    # You _can't_ (at least with Pandoc) have a bullet item with a newline in
    # MediaWiki, so _remove_ wrapping!
    if ($wordwrap_mode eq 'mediawiki') {
        $retval = "$bullet$str";
        $retval =~ s/\n/ /gms;
        $retval =~ s/\s+$//gms;
        #print("WORDWRAP BULLET DONE:\n\n$retval\n\n");
        return "$retval\n";
    }

    my $bulletlen = length($bullet);

    # wrap it and then indent each line to be under the bullet.
    $Text::Wrap::columns -= $bulletlen;
    my @wrappedlines = split /\n/, wordwrap_atom($str);
    $Text::Wrap::columns += $bulletlen;

    my $prefix = $bullet;
    my $usual_prefix = ' ' x $bulletlen;

    foreach (@wrappedlines) {
        s/\s*\Z//;
        $retval .= "$prefix$_\n";
        $prefix = $usual_prefix;
    }

    return $retval;
}
sub wordwrap_one_paragraph {  # don't call this directly.
    my $retval = '';
    my $p = shift;
    #print "\n\n\nPARAGRAPH: [$p]\n\n\n";
    if ($p =~ s/\A([\*\-] )//) {  # bullet list, starts with "* " or "- ".
        my $bullet = $1;
        my $item = '';
        my @items = split /\n/, $p;
        foreach (@items) {
            if (s/\A([\*\-] )//) {
                $retval .= wordwrap_with_bullet_indent($bullet, $item);
                $item = '';
            }
            s/\A\s*//;
            $item .= "$_\n";  # accumulate lines until we hit the end or another bullet.
        }
        if ($item ne '') {
            $retval .= wordwrap_with_bullet_indent($bullet, $item);
        }
    } else {
        $retval = wordwrap_atom($p) . "\n";
    }

    return $retval;
}

sub wordwrap_paragraphs {  # don't call this directly.
    my $str = shift;
    my $retval = '';
    my @paragraphs = split /\n\n/, $str;
    foreach (@paragraphs) {
        next if $_ eq '';
        $retval .= wordwrap_one_paragraph($_);
        $retval .= "\n";
    }
    return $retval;
}
my $wordwrap_default_columns = 76;

sub wordwrap {
    my $str = shift;
    my $columns = shift;

    $columns = $wordwrap_default_columns if not defined $columns;
    $columns += $wordwrap_default_columns if $columns < 0;
    $Text::Wrap::columns = $columns;

    my $retval = '';

    #print("\n\nWORDWRAP:\n\n$str\n\n\n");

    $str =~ s/\A\n+//ms;

    while ($str =~ s/(.*?)(\`\`\`.*?\`\`\`|\<syntaxhighlight.*?\<\/syntaxhighlight\>)//ms) {
        #print("\n\nWORDWRAP BLOCK:\n\n$1\n\n ===\n\n$2\n\n\n");
        $retval .= wordwrap_paragraphs($1);  # wrap it.
        $retval .= "$2\n\n";  # don't wrap it.
    }

    $retval .= wordwrap_paragraphs($str);  # wrap what's left.
    $retval =~ s/\n+\Z//ms;

    #print("\n\nWORDWRAP DONE:\n\n$retval\n\n\n");
    return $retval;
}
# This assumes you're moving from Markdown (in the Doxygen data) to Wiki, which
# is why the 'md' section is so sparse.
sub wikify_chunk {
    my $wikitype = shift;
    my $str = shift;
    my $codelang = shift;
    my $code = shift;

    #print("\n\nWIKIFY CHUNK:\n\n$str\n\n\n");

    if ($wikitype eq 'mediawiki') {
        # convert `code` things first, so they aren't mistaken for other markdown items.
        my $codedstr = '';
        while ($str =~ s/\A(.*?)\`(.*?)\`//ms) {
            my $codeblock = $2;
            $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
            if (defined $apiprefixregex) {
                # Convert obvious API things to wikilinks, even inside `code` blocks.
                $codeblock =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
            }
            $codedstr .= "<code>$codeblock</code>";
        }

        # Convert obvious API things to wikilinks.
        if (defined $apiprefixregex) {
            $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
        }

        # Make some Markdown things into MediaWiki...

        # links
        $str =~ s/\[(.*?)\]\((https?\:\/\/.*?)\)/\[$2 $1\]/g;

        # bold+italic
        $str =~ s/\*\*\*(.*?)\*\*\*/'''''$1'''''/gms;

        # bold
        $str =~ s/\*\*(.*?)\*\*/'''$1'''/gms;

        # italic
        $str =~ s/\*(.*?)\*/''$1''/gms;

        # bullets
        $str =~ s/^\- /* /gm;

        $str = $codedstr . $str;

        if (defined $code) {
            $str .= "<syntaxhighlight lang='$codelang'>$code<\/syntaxhighlight>";
        }
    } elsif ($wikitype eq 'md') {
        # convert `code` things first, so they aren't mistaken for other markdown items.
        my $codedstr = '';
        while ($str =~ s/\A(.*?)(\`.*?\`)//ms) {
            my $codeblock = $2;
            $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
            if (defined $apiprefixregex) {
                # Convert obvious API things to wikilinks, even inside `code` blocks,
                # BUT ONLY IF the entire code block is the API thing,
                # so something like "just call `SDL_Whatever`" will become
                # "just call [`SDL_Whatever`](SDL_Whatever)", but
                # "just call `SDL_Whatever(7)`" will not. It's just the safest
                # way to do this without resorting to wrapping things in html <code> tags.
                $codeblock =~ s/\A\`($apiprefixregex[a-zA-Z0-9_]+)\`\Z/[`$1`]($1)/gms;
            }
            $codedstr .= $codeblock;
        }

        # Convert obvious API things to wikilinks.
        if (defined $apiprefixregex) {
            $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[$1]($1)/gms;
        }

        $str = $codedstr . $str;

        if (defined $code) {
            $str .= "```$codelang$code```";
        }
    }

    #print("\n\nWIKIFY CHUNK DONE:\n\n$str\n\n\n");

    return $str;
}
sub wikify {
    my $wikitype = shift;
    my $str = shift;
    my $retval = '';

    #print("WIKIFY WHOLE:\n\n$str\n\n\n");

    while ($str =~ s/\A(.*?)\`\`\`(c\+\+|c)(.*?)\`\`\`//ms) {
        $retval .= wikify_chunk($wikitype, $1, $2, $3);
    }
    $retval .= wikify_chunk($wikitype, $str, undef, undef);

    #print("WIKIFY WHOLE DONE:\n\n$retval\n\n\n");

    return $retval;
}
my $dewikify_mode = 'md';
my $dewikify_manpage_code_indent = 1;

sub dewikify_chunk {
    my $wikitype = shift;
    my $str = shift;
    my $codelang = shift;
    my $code = shift;

    #print("\n\nDEWIKIFY CHUNK:\n\n$str\n\n\n");

    if ($dewikify_mode eq 'md') {
        if ($wikitype eq 'mediawiki') {
            # Doxygen supports Markdown (and it just simply looks better than MediaWiki
            # when looking at the raw headers), so do some conversions here as necessary.

            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]/$1/gms;
            }

            # links
            $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\[$2\]\($1\)/g;

            # <code></code> is also popular.  :/
            $str =~ s/\<code>(.*?)<\/code>/`$1`/gms;

            # bold+italic
            $str =~ s/'''''(.*?)'''''/***$1***/gms;

            # bold
            $str =~ s/'''(.*?)'''/**$1**/gms;

            # italic
            $str =~ s/''(.*?)''/*$1*/gms;

            # bullets
            $str =~ s/^\* /- /gm;
        } elsif ($wikitype eq 'md') {
            # Dump obvious wikilinks. The rest can just passthrough.
            if (defined $apiprefixregex) {
                $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/$1/gms;
            }
        }

        if (defined $code) {
            $str .= "```$codelang$code```";
        }
    } elsif ($dewikify_mode eq 'manpage') {
        $str =~ s/\./\\[char46]/gms;  # make sure these can't become control codes.
        if ($wikitype eq 'mediawiki') {
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\s*\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]\s*/\n.BR $1\n/gms;
            }

            # links
            $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\n.URL "$1" "$2"\n/g;

            # <code></code> is also popular.  :/
            $str =~ s/\s*\<code>(.*?)<\/code>\s*/\n.BR $1\n/gms;

            # bold+italic
            $str =~ s/\s*'''''(.*?)'''''\s*/\n.BI $1\n/gms;

            # bold
            $str =~ s/\s*'''(.*?)'''\s*/\n.B $1\n/gms;

            # italic
            $str =~ s/\s*''(.*?)''\s*/\n.I $1\n/gms;

            # bullets
            $str =~ s/^\* /\n\\\(bu /gm;
        } elsif ($wikitype eq 'md') {
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/\n.BR $1\n/gms;
            }

            # links
            $str =~ s/\[(.*?)]\((https?\:\/\/.*?)\)/\n.URL "$2" "$1"\n/g;

            # <code></code> is also popular.  :/
            $str =~ s/\s*\`(.*?)\`\s*/\n.BR $1\n/gms;

            # bold+italic
            $str =~ s/\s*\*\*\*(.*?)\*\*\*\s*/\n.BI $1\n/gms;

            # bold
            $str =~ s/\s*\*\*(.*?)\*\*\s*/\n.B $1\n/gms;

            # italic
            $str =~ s/\s*\*(.*?)\*\s*/\n.I $1\n/gms;

            # bullets
            $str =~ s/^\- /\n\\\(bu /gm;
        } else {
            die("Unexpected wikitype when converting to manpages\n");  # !!! FIXME: need to handle Markdown wiki pages.
        }

        if (defined $code) {
            $code =~ s/\A\n+//gms;
            $code =~ s/\n+\Z//gms;
            if ($dewikify_manpage_code_indent) {
                $str .= "\n.IP\n";
            } else {
                $str .= "\n.PP\n";
            }
            $str .= ".EX\n$code\n.EE\n.PP\n";
        }
    } else {
        die("Unexpected dewikify_mode\n");
    }

    #print("\n\nDEWIKIFY CHUNK DONE:\n\n$str\n\n\n");

    return $str;
}
sub dewikify {
    my $wikitype = shift;
    my $str = shift;
    return '' if not defined $str;

    #print("DEWIKIFY WHOLE:\n\n$str\n\n\n");

    $str =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
    $str =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;

    my $retval = '';
    while ($str =~ s/\A(.*?)<syntaxhighlight lang='?(.*?)'?>(.*?)<\/syntaxhighlight\>//ms) {
        $retval .= dewikify_chunk($wikitype, $1, $2, $3);
    }
    $retval .= dewikify_chunk($wikitype, $str, undef, undef);

    #print("DEWIKIFY WHOLE DONE:\n\n$retval\n\n\n");

    return $retval;
}
sub filecopy {
    my $src = shift;
    my $dst = shift;
    my $endline = shift;
    $endline = "\n" if not defined $endline;

    open(COPYIN, '<', $src) or die("Failed to open '$src' for reading: $!\n");
    open(COPYOUT, '>', $dst) or die("Failed to open '$dst' for writing: $!\n");

    while (<COPYIN>) {
        chomp;
        s/[ \t\r\n]*\Z//;
        print COPYOUT "$_$endline";
    }

    close(COPYOUT);
    close(COPYIN);
}
sub usage {
    die("USAGE: $0 <source code git clone path> <wiki git clone path> [--copy-to-headers|--copy-to-wiki|--copy-to-manpages] [--warn-about-missing]\n\n");
}

usage() if not defined $srcpath;
usage() if not defined $wikipath;
#usage() if $copy_direction == 0;
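
# Typical invocations (the paths here are illustrative only):
#   ./wikiheaders.pl ../SDL ../sdlwiki --copy-to-wiki
#   ./wikiheaders.pl ../SDL ../sdlwiki --copy-to-headers
#   ./wikiheaders.pl ../SDL ../sdlwiki --copy-to-manpages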
my @standard_wiki_sections = (
    'Draft',
    '[Brief]',
    'Deprecated',
    'Syntax',
    'Function Parameters',
    'Return Value',
    'Remarks',
    'Thread Safety',
    'Version',
    'Code Examples',
    'Related Functions'
);

# Sections that only ever exist in the wiki and shouldn't be deleted when
# not found in the headers.
my %only_wiki_sections = (  # The values don't mean anything, I just need to check for key existence.
    'Draft', 1,
    'Code Examples', 1
);
my %headers = ();       # $headers{"SDL_audio.h"} -> reference to an array of all lines of text in SDL_audio.h.
my %headerfuncs = ();   # $headerfuncs{"SDL_OpenAudio"} -> string of header documentation for SDL_OpenAudio, with comment '*' bits stripped from the start. Newlines embedded!
my %headerdecls = ();
my %headerfuncslocation = ();   # $headerfuncslocation{"SDL_OpenAudio"} -> name of header holding SDL_OpenAudio define ("SDL_audio.h" in this case).
my %headerfuncschunk = ();      # $headerfuncschunk{"SDL_OpenAudio"} -> offset in array in %headers that should be replaced for this function.
my %headerfuncshasdoxygen = (); # $headerfuncshasdoxygen{"SDL_OpenAudio"} -> 1 if there was existing doxygen for this function.

my $incpath = "$srcpath";
$incpath .= "/$incsubdir" if $incsubdir ne '';

my $wikireadmepath = "$wikipath/$wikireadmesubdir";
my $readmepath = undef;
if (defined $readmesubdir) {
    $readmepath = "$srcpath/$readmesubdir";
}
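
# Scan the public headers: for each function declaration, remember its Doxygen
# comment text, the cleaned-up declaration, which header it lives in, and the
# index of the comment block within that header's line array.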
opendir(DH, $incpath) or die("Can't opendir '$incpath': $!\n");
while (my $d = readdir(DH)) {
    my $dent = $d;
    next if not $dent =~ /$selectheaderregex/;  # just selected headers.
    open(FH, '<', "$incpath/$dent") or die("Can't open '$incpath/$dent': $!\n");

    my @contents = ();

    while (<FH>) {
        chomp;
        my $decl;
        my @templines;
        my $str;
        my $has_doxygen = 1;
        if (/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) {  # a function declaration without a doxygen comment?
            @templines = ();
            $decl = $_;
            $str = '';
            $has_doxygen = 0;
        } elsif (not /\A\/\*\*\s*\Z/) {  # not doxygen comment start?
            push @contents, $_;
            next;
        } else {  # Start of a doxygen comment, parse it out.
            @templines = ( $_ );
            while (<FH>) {
                chomp;
                push @templines, $_;
                last if /\A\s*\*\/\Z/;
                if (s/\A\s*\*\s*\`\`\`/```/) {  # this is a hack, but a lot of other code relies on the whitespace being trimmed, but we can't trim it in code blocks...
                    $str .= "$_\n";
                    while (<FH>) {
                        chomp;
                        push @templines, $_;
                        s/\A\s*\*\s?//;
                        if (s/\A\s*\`\`\`/```/) {
                            $str .= "$_\n";
                            last;
                        } else {
                            $str .= "$_\n";
                        }
                    }
                } else {
                    s/\A\s*\*\s*//;
                    $str .= "$_\n";
                }
            }

            $decl = <FH>;
            $decl = '' if not defined $decl;
            chomp($decl);
            if (not $decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) {
                #print "Found doxygen but no function sig:\n$str\n\n";
                foreach (@templines) {
                    push @contents, $_;
                }
                push @contents, $decl;
                next;
            }
        }

        my @decllines = ( $decl );

        if (not $decl =~ /\)\s*;/) {
            while (<FH>) {
                chomp;
                push @decllines, $_;
                s/\A\s+//;
                s/\s+\Z//;
                $decl .= " $_";
                last if /\)\s*;/;
            }
        }

        $decl =~ s/\s+\);\Z/);/;
        $decl =~ s/\s+\Z//;
        #print("DECL: [$decl]\n");

        my $fn = '';
        if ($decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(const\s+|)(unsigned\s+|)(.*?)\s*(\*?)\s*SDLCALL\s+(.*?)\s*\((.*?)\);/) {
            $fn = $6;
            #$decl =~ s/\A\s*extern\s+DECLSPEC\s+(.*?)\s+SDLCALL/$1/;
        } else {
            #print "Found doxygen but no function sig:\n$str\n\n";
            foreach (@templines) {
                push @contents, $_;
            }
            foreach (@decllines) {
                push @contents, $_;
            }
            next;
        }

        $decl = '';  # build this with the line breaks, since it looks better for syntax highlighting.
        foreach (@decllines) {
            if ($decl eq '') {
                $decl = $_;
                $decl =~ s/\Aextern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL\s+/$2$3 /;
            } else {
                my $trimmed = $_;
                # !!! FIXME: trim space for SDL_DEPRECATED if it was used, too.
                $trimmed =~ s/\A\s{24}//;  # 24 for shrinking to match the removed "extern DECLSPEC SDLCALL "
                $decl .= $trimmed;
            }
            $decl .= "\n";
        }

        #print("$fn:\n$str\n\n");

        # There might be multiple declarations of a function due to #ifdefs,
        # and only one of them will have documentation. If we hit an
        # undocumented one before, delete the placeholder line we left for
        # it so it doesn't accumulate a new blank line on each run.
        my $skipfn = 0;
        if (defined $headerfuncshasdoxygen{$fn}) {
            if ($headerfuncshasdoxygen{$fn} == 0) {  # An undocumented declaration already exists, nuke its placeholder line.
                delete $contents[$headerfuncschunk{$fn}];  # delete DOES NOT RENUMBER existing elements!
            } else {  # documented function already existed?
                $skipfn = 1;  # don't add this copy to the list of functions.
                if ($has_doxygen) {
                    print STDERR "WARNING: Function '$fn' appears to be documented in multiple locations. Only keeping the first one we saw!\n";
                }
                push @contents, join("\n", @decllines);  # just put the existing declaration in as-is.
            }
        }

        if (!$skipfn) {
            $headerfuncs{$fn} = $str;
            $headerdecls{$fn} = $decl;
            $headerfuncslocation{$fn} = $dent;
            $headerfuncschunk{$fn} = scalar(@contents);
            $headerfuncshasdoxygen{$fn} = $has_doxygen;
            push @contents, join("\n", @templines);
            push @contents, join("\n", @decllines);
        }
    }
    close(FH);

    $headers{$dent} = \@contents;
}
closedir(DH);
# !!! FIXME: we need to parse enums and typedefs and structs and defines and and and and and...
# !!! FIXME:  (but functions are good enough for now.)

my %wikitypes = ();  # contains string of wiki page extension, like $wikitypes{"SDL_OpenAudio"} == 'mediawiki'
my %wikifuncs = ();  # contains references to hash of strings, each string being the full contents of a section of a wiki page, like $wikifuncs{"SDL_OpenAudio"}{"Remarks"}.
my %wikisectionorder = ();  # contains references to array, each array item being a key to a wikipage section in the correct order, like $wikisectionorder{"SDL_OpenAudio"}[2] == 'Remarks'
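
# Scan the wiki clone: split each page (.md or .mediawiki) into named sections
# keyed by heading, remembering the section order so pages can be rebuilt in
# the same shape later.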
opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n");
while (my $d = readdir(DH)) {
    my $dent = $d;
    my $type = '';
    if ($dent =~ /\.(md|mediawiki)\Z/) {
        $type = $1;
    } else {
        next;  # only dealing with wiki pages.
    }

    my $fn = $dent;
    $fn =~ s/\..*\Z//;

    # Ignore FrontPage.
    next if $fn eq 'FrontPage';

    # Ignore "Category*" pages.
    next if ($fn =~ /\ACategory/);

    open(FH, '<', "$wikipath/$dent") or die("Can't open '$wikipath/$dent': $!\n");

    my $current_section = '[start]';
    my @section_order = ( $current_section );
    my %sections = ();
    $sections{$current_section} = '';

    my $firstline = 1;

    while (<FH>) {
        chomp;
        my $orig = $_;
        s/\A\s*//;
        s/\s*\Z//;

        if ($type eq 'mediawiki') {
            if (defined($wikipreamble) && $firstline && /\A\=\=\=\=\=\= (.*?) \=\=\=\=\=\=\Z/ && ($1 eq $wikipreamble)) {
                $firstline = 0;  # skip this.
                next;
            } elsif (/\A\= (.*?) \=\Z/) {
                $firstline = 0;
                $current_section = ($1 eq $fn) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
            } elsif (/\A\=\= (.*?) \=\=\Z/) {
                $firstline = 0;
                $current_section = ($1 eq $fn) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            } elsif (/\A\-\-\-\-\Z/) {
                $firstline = 0;
                $current_section = '[footer]';
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            }
        } elsif ($type eq 'md') {
            if (defined($wikipreamble) && $firstline && /\A\#\#\#\#\#\# (.*?)\Z/ && ($1 eq $wikipreamble)) {
                $firstline = 0;  # skip this.
                next;
            } elsif (/\A\#+ (.*?)\Z/) {
                $firstline = 0;
                $current_section = ($1 eq $fn) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            } elsif (/\A\-\-\-\-\Z/) {
                $firstline = 0;
                $current_section = '[footer]';
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            }
        } else {
            die("Unexpected wiki file type. Fixme!\n");
        }

        if ($firstline) {
            $firstline = ($_ ne '');
        }
        if (!$firstline) {
            $sections{$current_section} .= "$orig\n";
        }
    }
    close(FH);

    foreach (keys %sections) {
        $sections{$_} =~ s/\A\n+//;
        $sections{$_} =~ s/\n+\Z//;
        $sections{$_} .= "\n";
    }

    if (0) {
        foreach (@section_order) {
            print("$fn SECTION '$_':\n");
            print($sections{$_});
            print("\n\n");
        }
    }

    $wikitypes{$fn} = $type;
    $wikifuncs{$fn} = \%sections;
    $wikisectionorder{$fn} = \@section_order;
}
closedir(DH);
if ($warn_about_missing) {
    foreach (keys %wikifuncs) {
        my $fn = $_;
        if (not defined $headerfuncs{$fn}) {
            print("WARNING: $fn defined in the wiki but not the headers!\n");
        }
    }
    foreach (keys %headerfuncs) {
        my $fn = $_;
        if (not defined $wikifuncs{$fn}) {
            print("WARNING: $fn defined in the headers but not the wiki!\n");
        }
    }
}
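
# --copy-to-headers: rebuild each function's Doxygen comment block in the
# headers from the wiki page sections (brief, remarks, params, returns, etc.).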
if ($copy_direction == 1) {  # --copy-to-headers
    my %changed_headers = ();

    $dewikify_mode = 'md';
    $wordwrap_mode = 'md';  # the headers use Markdown format.

    foreach (keys %headerfuncs) {
        my $fn = $_;
        next if not defined $wikifuncs{$fn};  # don't have a page for that function, skip it.
        my $wikitype = $wikitypes{$fn};
        my $sectionsref = $wikifuncs{$fn};
        my $remarks = $sectionsref->{'Remarks'};
        my $params = $sectionsref->{'Function Parameters'};
        my $returns = $sectionsref->{'Return Value'};
        my $threadsafety = $sectionsref->{'Thread Safety'};
        my $version = $sectionsref->{'Version'};
        my $related = $sectionsref->{'Related Functions'};
        my $deprecated = $sectionsref->{'Deprecated'};
        my $brief = $sectionsref->{'[Brief]'};
        my $addblank = 0;
        my $str = '';

        $headerfuncshasdoxygen{$fn} = 1;  # Added/changed doxygen for this header.

        $brief = dewikify($wikitype, $brief);
        $brief =~ s/\A(.*?\.) /$1\n/;  # \brief should only be one sentence, delimited by a period+space. Split if necessary.
        my @briefsplit = split /\n/, $brief;
        $brief = shift @briefsplit;

        if (defined $remarks) {
            $remarks = join("\n", @briefsplit) . dewikify($wikitype, $remarks);
        }

        if (defined $brief) {
            $str .= "\n" if $addblank; $addblank = 1;
            $str .= wordwrap($brief) . "\n";
        }

        if (defined $remarks) {
            $str .= "\n" if $addblank; $addblank = 1;
            $str .= wordwrap($remarks) . "\n";
        }

        if (defined $deprecated) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $deprecated);
            my $whitespacelen = length("\\deprecated") + 1;
            my $whitespace = ' ' x $whitespacelen;
            $v = wordwrap($v, -$whitespacelen);
            my @desclines = split /\n/, $v;
            my $firstline = shift @desclines;
            $str .= "\\deprecated $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }

        if (defined $params) {
            $str .= "\n" if $addblank; $addblank = (defined $returns) ? 0 : 1;
            my @lines = split /\n/, dewikify($wikitype, $params);
            if ($wikitype eq 'mediawiki') {
                die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|');  # Dump the '{|' start
                while (scalar(@lines) >= 3) {
                    my $name = shift @lines;
                    my $desc = shift @lines;
                    my $terminator = shift @lines;  # the '|-' or '|}' line.
                    last if ($terminator ne '|-') and ($terminator ne '|}');  # we seem to have run out of table.
                    $name =~ s/\A\|\s*//;
                    $name =~ s/\A\*\*(.*?)\*\*/$1/;
                    $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                    $desc =~ s/\A\|\s*//;
                    #print STDERR "FN: $fn NAME: $name DESC: $desc TERM: $terminator\n";
                    my $whitespacelen = length($name) + 8;
                    my $whitespace = ' ' x $whitespacelen;
                    $desc = wordwrap($desc, -$whitespacelen);
                    my @desclines = split /\n/, $desc;
                    my $firstline = shift @desclines;
                    $str .= "\\param $name $firstline\n";
                    foreach (@desclines) {
                        $str .= "${whitespace}$_\n";
                    }
                }
            } elsif ($wikitype eq 'md') {
                my $l;
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\|\s*\|\s*\Z/);
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/);
                while (scalar(@lines) >= 1) {
                    $l = shift @lines;
                    if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) {
                        my $name = $1;
                        my $desc = $2;
                        $name =~ s/\A\*\*(.*?)\*\*/$1/;
                        $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                        #print STDERR "FN: $fn NAME: $name DESC: $desc\n";
                        my $whitespacelen = length($name) + 8;
                        my $whitespace = ' ' x $whitespacelen;
                        $desc = wordwrap($desc, -$whitespacelen);
                        my @desclines = split /\n/, $desc;
                        my $firstline = shift @desclines;
                        $str .= "\\param $name $firstline\n";
                        foreach (@desclines) {
                            $str .= "${whitespace}$_\n";
                        }
                    } else {
                        last;  # we seem to have run out of table.
                    }
                }
            } else {
                die("write me");
            }
        }

        if (defined $returns) {
            $str .= "\n" if $addblank; $addblank = 1;
            my $r = dewikify($wikitype, $returns);
            my $retstr = "\\returns";
            if ($r =~ s/\AReturn(s?) //) {
                $retstr = "\\return$1";
            }

            my $whitespacelen = length($retstr) + 1;
            my $whitespace = ' ' x $whitespacelen;
            $r = wordwrap($r, -$whitespacelen);
            my @desclines = split /\n/, $r;
            my $firstline = shift @desclines;
            $str .= "$retstr $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }

        if (defined $threadsafety) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $threadsafety);
            my $whitespacelen = length("\\threadsafety") + 1;
            my $whitespace = ' ' x $whitespacelen;
            $v = wordwrap($v, -$whitespacelen);
            my @desclines = split /\n/, $v;
            my $firstline = shift @desclines;
            $str .= "\\threadsafety $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }

        if (defined $version) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $version);
            my $whitespacelen = length("\\since") + 1;
            my $whitespace = ' ' x $whitespacelen;
            $v = wordwrap($v, -$whitespacelen);
            my @desclines = split /\n/, $v;
            my $firstline = shift @desclines;
            $str .= "\\since $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }

        if (defined $related) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $related);
            my @desclines = split /\n/, $v;
            foreach (@desclines) {
                s/\A(\:|\* )//;
                s/\(\)\Z//;  # Convert "SDL_Func()" to "SDL_Func"
                s/\[\[(.*?)\]\]/$1/;  # in case some wikilinks remain.
                s/\[(.*?)\]\(.*?\)/$1/;  # in case some wikilinks remain.
                s/\A\/*//;
                $str .= "\\sa $_\n";
            }
        }

        my $header = $headerfuncslocation{$fn};
        my $contentsref = $headers{$header};
        my $chunk = $headerfuncschunk{$fn};

        my @lines = split /\n/, $str;

        my $addnewline = (($chunk > 0) && ($$contentsref[$chunk-1] ne '')) ? "\n" : '';

        my $output = "$addnewline/**\n";
        foreach (@lines) {
            chomp;
            s/\s*\Z//;
            if ($_ eq '') {
                $output .= " *\n";
            } else {
                $output .= " * $_\n";
            }
        }
        $output .= " */";

        #print("$fn:\n$output\n\n");

        $$contentsref[$chunk] = $output;
        #$$contentsref[$chunk+1] = $headerdecls{$fn};

        $changed_headers{$header} = 1;
    }

    foreach (keys %changed_headers) {
        my $header = $_;

        # this is kinda inefficient, but oh well.
        my @removelines = ();
        foreach (keys %headerfuncslocation) {
            my $fn = $_;
            next if $headerfuncshasdoxygen{$fn};
            next if $headerfuncslocation{$fn} ne $header;
            # the index of the blank line we put before the function declaration in case we needed to replace it with new content from the wiki.
            push @removelines, $headerfuncschunk{$fn};
        }

        my $contentsref = $headers{$header};
        foreach (@removelines) {
            delete $$contentsref[$_];  # delete DOES NOT RENUMBER existing elements!
        }

        my $path = "$incpath/$header.tmp";
        open(FH, '>', $path) or die("Can't open '$path': $!\n");
        foreach (@$contentsref) {
            print FH "$_\n" if defined $_;
        }
        close(FH);

        rename($path, "$incpath/$header") or die("Can't rename '$path' to '$incpath/$header': $!\n");
    }

    if (defined $readmepath) {
        if ( -d $wikireadmepath ) {
            mkdir($readmepath);  # just in case
            opendir(DH, $wikireadmepath) or die("Can't opendir '$wikireadmepath': $!\n");
            while (readdir(DH)) {
                my $dent = $_;
                if ($dent =~ /\A(.*?)\.md\Z/) {  # we only bridge Markdown files here.
                    next if $1 eq 'FrontPage';
                    filecopy("$wikireadmepath/$dent", "$readmepath/README-$dent", "\r\n");
                }
            }
            closedir(DH);
        }
    }
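
# --copy-to-wiki: regenerate each function's wiki page from the header's
# Doxygen comment, preserving wiki-only sections such as Code Examples.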
} elsif ($copy_direction == -1) {  # --copy-to-wiki

    if (defined $changeformat) {
        $dewikify_mode = $changeformat;
        $wordwrap_mode = $changeformat;
    }

    foreach (keys %headerfuncs) {
        my $fn = $_;
        next if not $headerfuncshasdoxygen{$fn};
        my $origwikitype = defined $wikitypes{$fn} ? $wikitypes{$fn} : 'md';  # default to MarkDown for new stuff.
        my $wikitype = (defined $changeformat) ? $changeformat : $origwikitype;
        die("Unexpected wikitype '$wikitype'\n") if (($wikitype ne 'mediawiki') and ($wikitype ne 'md') and ($wikitype ne 'manpage'));

        #print("$fn\n"); next;

        $wordwrap_mode = $wikitype;

        my $raw = $headerfuncs{$fn};  # raw doxygen text with comment characters stripped from start/end and start of each line.
        next if not defined $raw;
        $raw =~ s/\A\s*\\brief\s+//;  # Technically we don't need \brief (please turn on JAVADOC_AUTOBRIEF if you use Doxygen), so just in case one is present, strip it.

        my @doxygenlines = split /\n/, $raw;
        my $brief = '';
        while (@doxygenlines) {
            last if $doxygenlines[0] =~ /\A\\/;  # some sort of doxygen command, assume we're past the general remarks.
            last if $doxygenlines[0] =~ /\A\s*\Z/;  # blank line? End of paragraph, done.
            my $l = shift @doxygenlines;
            chomp($l);
            $l =~ s/\A\s*//;
            $l =~ s/\s*\Z//;
            $brief .= "$l ";
        }

        $brief =~ s/\A(.*?\.) /$1\n\n/;  # \brief should only be one sentence, delimited by a period+space. Split if necessary.
        my @briefsplit = split /\n/, $brief;
        $brief = wikify($wikitype, shift @briefsplit) . "\n";
        @doxygenlines = (@briefsplit, @doxygenlines);

        my $remarks = '';
        # !!! FIXME: wordwrap and wikify might handle this, now.
        while (@doxygenlines) {
            last if $doxygenlines[0] =~ /\A\\/;  # some sort of doxygen command, assume we're past the general remarks.
            my $l = shift @doxygenlines;
            if ($l =~ /\A\`\`\`/) {  # syntax highlighting, don't reformat.
                $remarks .= "$l\n";
                while ((@doxygenlines) && (not $l =~ /\`\`\`\Z/)) {
                    $l = shift @doxygenlines;
                    $remarks .= "$l\n";
                }
            } else {
                $l =~ s/\A\s*//;
                $l =~ s/\s*\Z//;
                $remarks .= "$l\n";
            }
        }

        #print("REMARKS:\n\n $remarks\n\n");

        $remarks = wordwrap(wikify($wikitype, $remarks));
        $remarks =~ s/\A\s*//;
        $remarks =~ s/\s*\Z//;

        my $decl = $headerdecls{$fn};
        #$decl =~ s/\*\s+SDLCALL/ *SDLCALL/;  # Try to make "void * Function" become "void *Function"
        #$decl =~ s/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL/$2$3/;

        my $syntax = '';
        if ($wikitype eq 'mediawiki') {
            $syntax = "<syntaxhighlight lang='c'>\n$decl</syntaxhighlight>\n";
        } elsif ($wikitype eq 'md') {
            $syntax = "```c\n$decl\n```\n";
        } else { die("Expected wikitype '$wikitype'\n"); }

        my %sections = ();
        $sections{'[Brief]'} = $brief;  # include this section even if blank so we get a title line.
        $sections{'Remarks'} = "$remarks\n" if $remarks ne '';
        $sections{'Syntax'} = $syntax;

        my @params = ();  # have to parse these and build up the wiki tables after, since Markdown needs to know the length of the largest string.  :/
        while (@doxygenlines) {
            my $l = shift @doxygenlines;
            if ($l =~ /\A\\param\s+(.*?)\s+(.*)\Z/) {
                my $arg = $1;
                my $desc = $2;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/;  # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines;  # dump this line from the array; we're using it.
                    if ($subline eq '') {  # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }

                $desc =~ s/[\s\n]+\Z//ms;

                # We need to know the length of the longest string to make Markdown tables, so we just store these off until everything is parsed.
                push @params, $arg;
                push @params, $desc;
            } elsif ($l =~ /\A\\r(eturns?)\s+(.*)\Z/) {
                my $retstr = "R$1";  # "Return" or "Returns"
                my $desc = $2;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/;  # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines;  # dump this line from the array; we're using it.
                    if ($subline eq '') {  # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Return Value'} = wordwrap("$retstr " . wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\deprecated\s+(.*)\Z/) {
                my $desc = $1;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/;  # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines;  # dump this line from the array; we're using it.
                    if ($subline eq '') {  # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Deprecated'} = wordwrap(wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\since\s+(.*)\Z/) {
                my $desc = $1;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/;  # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines;  # dump this line from the array; we're using it.
                    if ($subline eq '') {  # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Version'} = wordwrap(wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\threadsafety\s+(.*)\Z/) {
                my $desc = $1;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/;  # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines;  # dump this line from the array; we're using it.
                    if ($subline eq '') {  # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Thread Safety'} = wordwrap(wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\sa\s+(.*)\Z/) {
                my $sa = $1;
                $sa =~ s/\(\)\Z//;  # Convert "SDL_Func()" to "SDL_Func"
                $sections{'Related Functions'} = '' if not defined $sections{'Related Functions'};
                if ($wikitype eq 'mediawiki') {
                    $sections{'Related Functions'} .= ":[[$sa]]\n";
                } elsif ($wikitype eq 'md') {
                    $sections{'Related Functions'} .= "* [$sa]($sa)\n";
                } else { die("Expected wikitype '$wikitype'\n"); }
            }
        }

        # Make sure this ends with a double-newline.
        $sections{'Related Functions'} .= "\n" if defined $sections{'Related Functions'};

        # We can build the wiki table now that we have all the data.
        if (scalar(@params) > 0) {
            my $str = '';
            if ($wikitype eq 'mediawiki') {
                while (scalar(@params) > 0) {
                    my $arg = shift @params;
                    my $desc = wikify($wikitype, shift @params);
                    $str .= ($str eq '') ? "{|\n" : "|-\n";
                    $str .= "|'''$arg'''\n";
                    $str .= "|$desc\n";
                }
                $str .= "|}\n";
            } elsif ($wikitype eq 'md') {
                my $longest_arg = 0;
                my $longest_desc = 0;
                my $which = 0;
                foreach (@params) {
                    if ($which == 0) {
                        my $len = length($_) + 4;
                        $longest_arg = $len if ($len > $longest_arg);
                        $which = 1;
                    } else {
                        my $len = length(wikify($wikitype, $_));
                        $longest_desc = $len if ($len > $longest_desc);
                        $which = 0;
                    }
                }

                # Markdown tables are sort of obnoxious.
                $str .= '| ' . (' ' x ($longest_arg + 4)) . ' | ' . (' ' x $longest_desc) . " |\n";
                $str .= '| ' . ('-' x ($longest_arg + 4)) . ' | ' . ('-' x $longest_desc) . " |\n";

                while (@params) {
                    my $arg = shift @params;
                    my $desc = wikify($wikitype, shift @params);
                    $str .= "| **$arg** " . (' ' x ($longest_arg - length($arg))) . "| $desc" . (' ' x ($longest_desc - length($desc))) . " |\n";
                }
            } else {
                die("Unexpected wikitype!\n");  # should have checked this elsewhere.
            }
            $sections{'Function Parameters'} = $str;
        }

        my $path = "$wikipath/$_.${wikitype}.tmp";
        open(FH, '>', $path) or die("Can't open '$path': $!\n");

        my $sectionsref = $wikifuncs{$fn};

        foreach (@standard_wiki_sections) {
            # drop sections we either replaced or removed from the original wiki's contents.
            if (not defined $only_wiki_sections{$_}) {
                delete($$sectionsref{$_});
            }
        }

        my $wikisectionorderref = $wikisectionorder{$fn};

        # Make sure there's a footer in the wiki that puts this function in CategoryAPI...
        if (not $$sectionsref{'[footer]'}) {
            $$sectionsref{'[footer]'} = '';
            push @$wikisectionorderref, '[footer]';
        }

        # If changing format, convert things that otherwise are passed through unmolested.
        if (defined $changeformat) {
            if (($dewikify_mode eq 'md') and ($origwikitype eq 'mediawiki')) {
                $$sectionsref{'[footer]'} =~ s/\[\[(Category[a-zA-Z0-9_]+)\]\]/[$1]($1)/g;
            } elsif (($dewikify_mode eq 'mediawiki') and ($origwikitype eq 'md')) {
                $$sectionsref{'[footer]'} =~ s/\[(Category[a-zA-Z0-9_]+)\]\(.*?\)/[[$1]]/g;
            }
            foreach (keys %only_wiki_sections) {
                my $sect = $_;
                if (defined $$sectionsref{$sect}) {
                    $$sectionsref{$sect} = wikify($wikitype, dewikify($origwikitype, $$sectionsref{$sect}));
                }
            }
        }

        # !!! FIXME: This won't be CategoryAPI if we eventually handle things other than functions.
        my $footer = $$sectionsref{'[footer]'};

        if ($wikitype eq 'mediawiki') {
            $footer =~ s/\[\[CategoryAPI\]\],?\s*//g;
            $footer = '[[CategoryAPI]]' . (($footer eq '') ? "\n" : ", $footer");
        } elsif ($wikitype eq 'md') {
            $footer =~ s/\[CategoryAPI\]\(CategoryAPI\),?\s*//g;
            $footer = '[CategoryAPI](CategoryAPI)' . (($footer eq '') ? '' : ', ') . $footer;
        } else { die("Unexpected wikitype '$wikitype'\n"); }
        $$sectionsref{'[footer]'} = $footer;

        if (defined $wikipreamble) {
            my $wikified_preamble = wikify($wikitype, $wikipreamble);
            if ($wikitype eq 'mediawiki') {
                print FH "====== $wikified_preamble ======\n";
            } elsif ($wikitype eq 'md') {
                print FH "###### $wikified_preamble\n";
            } else { die("Unexpected wikitype '$wikitype'\n"); }
        }

        my $prevsectstr = '';
        my @ordered_sections = (@standard_wiki_sections, defined $wikisectionorderref ? @$wikisectionorderref : ());  # this copies the arrays into one.
        foreach (@ordered_sections) {
            my $sect = $_;
            next if $sect eq '[start]';
            next if (not defined $sections{$sect} and not defined $$sectionsref{$sect});
            my $section = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect};

            if ($sect eq '[footer]') {
                # Make sure previous section ends with two newlines.
                if (substr($prevsectstr, -1) ne "\n") {
                    print FH "\n\n";
                } elsif (substr($prevsectstr, -2) ne "\n\n") {
                    print FH "\n";
                }
                print FH "----\n";  # It's the same in Markdown and MediaWiki.
            } elsif ($sect eq '[Brief]') {
                if ($wikitype eq 'mediawiki') {
                    print FH "= $fn =\n\n";
                } elsif ($wikitype eq 'md') {
                    print FH "# $fn\n\n";
                } else { die("Unexpected wikitype '$wikitype'\n"); }
            } else {
                if ($wikitype eq 'mediawiki') {
                    print FH "\n== $sect ==\n\n";
                } elsif ($wikitype eq 'md') {
                    print FH "\n## $sect\n\n";
                } else { die("Unexpected wikitype '$wikitype'\n"); }
            }

            my $sectstr = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect};
            print FH $sectstr;

            $prevsectstr = $sectstr;

            # make sure these don't show up twice.
            delete($sections{$sect});
            delete($$sectionsref{$sect});
        }

        print FH "\n\n";
        close(FH);

        if (defined $changeformat and ($origwikitype ne $wikitype)) {
            system("cd '$wikipath' ; git mv '$_.${origwikitype}' '$_.${wikitype}'");
            unlink("$wikipath/$_.${origwikitype}");
        }

        rename($path, "$wikipath/$_.${wikitype}") or die("Can't rename '$path' to '$wikipath/$_.${wikitype}': $!\n");
    }

    if (defined $readmepath) {
        if ( -d $readmepath ) {
            mkdir($wikireadmepath);  # just in case
            opendir(DH, $readmepath) or die("Can't opendir '$readmepath': $!\n");
            while (my $d = readdir(DH)) {
                my $dent = $d;
                if ($dent =~ /\AREADME\-(.*?\.md)\Z/) {  # we only bridge Markdown files here.
                    my $wikifname = $1;
                    next if $wikifname eq 'FrontPage.md';
                    filecopy("$readmepath/$dent", "$wikireadmepath/$wikifname", "\n");
                }
            }
            closedir(DH);

            my @pages = ();
            opendir(DH, $wikireadmepath) or die("Can't opendir '$wikireadmepath': $!\n");
            while (my $d = readdir(DH)) {
                my $dent = $d;
                if ($dent =~ /\A(.*?)\.(mediawiki|md)\Z/) {
                    my $wikiname = $1;
                    next if $wikiname eq 'FrontPage';
                    push @pages, $wikiname;
                }
            }
            closedir(DH);

            open(FH, '>', "$wikireadmepath/FrontPage.md") or die("Can't open '$wikireadmepath/FrontPage.md': $!\n");
            print FH "# All READMEs available here\n\n";
            foreach (sort @pages) {
                my $wikiname = $_;
                print FH "- [$wikiname]($wikiname)\n";
            }
            close(FH);
        }
    }
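
# --copy-to-manpages: render each wiki page as a troff manpage in man/man3,
# pulling the project name and version from the configured version header.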
} elsif ($copy_direction == -2) {  # --copy-to-manpages
    # This only takes from the wiki data, since it has sections we omit from the headers, like code examples.

    my $manpath = "$srcpath/man";
    mkdir($manpath);
    $manpath .= "/man3";
    mkdir($manpath);

    $dewikify_mode = 'manpage';
    $wordwrap_mode = 'manpage';

    my $introtxt = '';
    if (0) {
        open(FH, '<', "$srcpath/LICENSE.txt") or die("Can't open '$srcpath/LICENSE.txt': $!\n");
        while (<FH>) {
            chomp;
            $introtxt .= ".\\\" $_\n";
        }
        close(FH);
    }

    my $gitrev = `cd "$srcpath" ; git rev-list HEAD~..`;
    chomp($gitrev);

    # !!! FIXME
    open(FH, '<', "$srcpath/$versionfname") or die("Can't open '$srcpath/$versionfname': $!\n");
    my $majorver = 0;
    my $minorver = 0;
    my $patchver = 0;
    while (<FH>) {
        chomp;
        if (/$versionmajorregex/) {
            $majorver = int($1);
        } elsif (/$versionminorregex/) {
            $minorver = int($1);
        } elsif (/$versionpatchregex/) {
            $patchver = int($1);
        }
    }
    close(FH);
    my $fullversion = "$majorver.$minorver.$patchver";

    foreach (keys %headerfuncs) {
        my $fn = $_;
        next if not defined $wikifuncs{$fn};  # don't have a page for that function, skip it.
        my $wikitype = $wikitypes{$fn};
        my $sectionsref = $wikifuncs{$fn};
        my $remarks = $sectionsref->{'Remarks'};
        my $params = $sectionsref->{'Function Parameters'};
        my $returns = $sectionsref->{'Return Value'};
        my $version = $sectionsref->{'Version'};
        my $threadsafety = $sectionsref->{'Thread Safety'};
        my $related = $sectionsref->{'Related Functions'};
        my $examples = $sectionsref->{'Code Examples'};
        my $deprecated = $sectionsref->{'Deprecated'};
        my $brief = $sectionsref->{'[Brief]'};
        my $decl = $headerdecls{$fn};
        my $str = '';

        $brief = "$brief";
        $brief =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
        $brief =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;
        $brief =~ s/\A(.*?\.) /$1\n/;  # \brief should only be one sentence, delimited by a period+space. Split if necessary.
        my @briefsplit = split /\n/, $brief;
        $brief = shift @briefsplit;
        $brief = dewikify($wikitype, $brief);

        if (defined $remarks) {
            $remarks = dewikify($wikitype, join("\n", @briefsplit) . $remarks);
        }

        $str .= $introtxt;

        $str .= ".\\\" This manpage content is licensed under Creative Commons\n";
        $str .= ".\\\" Attribution 4.0 International (CC BY 4.0)\n";
        $str .= ".\\\" https://creativecommons.org/licenses/by/4.0/\n";
        $str .= ".\\\" This manpage was generated from ${projectshortname}'s wiki page for $fn:\n";
        $str .= ".\\\" $wikiurl/$fn\n";
        $str .= ".\\\" Generated with SDL/build-scripts/wikiheaders.pl\n";
        $str .= ".\\\" revision $gitrev\n" if $gitrev ne '';
        $str .= ".\\\" Please report issues in this manpage's content at:\n";
        $str .= ".\\\" $bugreporturl\n";
        $str .= ".\\\" Please report issues in the generation of this manpage from the wiki at:\n";
        $str .= ".\\\" https://github.com/libsdl-org/SDL/issues/new?title=Misgenerated%20manpage%20for%20$fn\n";
        $str .= ".\\\" $projectshortname can be found at $projecturl\n";

        # Define a .URL macro. The "www.tmac" thing decides if we're using GNU roff (which has a .URL macro already), and if so, overrides the macro we just created.
        # This wizardry is from https://web.archive.org/web/20060102165607/http://people.debian.org/~branden/talks/wtfm/wtfm.pdf
        $str .= ".de URL\n";
        $str .= '\\$2 \(laURL: \\$1 \(ra\\$3' . "\n";
        $str .= "..\n";
        $str .= '.if \n[.g] .mso www.tmac' . "\n";

        $str .= ".TH $fn 3 \"$projectshortname $fullversion\" \"$projectfullname\" \"$projectshortname$majorver FUNCTIONS\"\n";
        $str .= ".SH NAME\n";

        $str .= "$fn";
        $str .= " \\- $brief" if (defined $brief);
        $str .= "\n";

        $str .= ".SH SYNOPSIS\n";
        $str .= ".nf\n";
        $str .= ".B #include \\(dq$mainincludefname\\(dq\n";
        $str .= ".PP\n";

        my @decllines = split /\n/, $decl;
        foreach (@decllines) {
            $str .= ".BI \"$_\n";
        }
        $str .= ".fi\n";

        if (defined $remarks) {
            $str .= ".SH DESCRIPTION\n";
            $str .= $remarks . "\n";
        }

        if (defined $deprecated) {
            $str .= ".SH DEPRECATED\n";
            $str .= dewikify($wikitype, $deprecated) . "\n";
        }

        if (defined $params) {
            $str .= ".SH FUNCTION PARAMETERS\n";
            my @lines = split /\n/, $params;
            if ($wikitype eq 'mediawiki') {
                die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|');  # Dump the '{|' start
                while (scalar(@lines) >= 3) {
                    my $name = shift @lines;
                    my $desc = shift @lines;
                    my $terminator = shift @lines;  # the '|-' or '|}' line.
                    last if ($terminator ne '|-') and ($terminator ne '|}');  # we seem to have run out of table.
                    $name =~ s/\A\|\s*//;
                    $name =~ s/\A\*\*(.*?)\*\*/$1/;
                    $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                    $desc =~ s/\A\|\s*//;
                    $desc = dewikify($wikitype, $desc);
                    #print STDERR "FN: $fn NAME: $name DESC: $desc TERM: $terminator\n";

                    $str .= ".TP\n";
                    $str .= ".I $name\n";
                    $str .= "$desc\n";
                }
            } elsif ($wikitype eq 'md') {
                my $l;
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\|\s*\|\s*\Z/);
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/);
                while (scalar(@lines) >= 1) {
                    $l = shift @lines;
                    if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) {
                        my $name = $1;
                        my $desc = $2;
                        $name =~ s/\A\*\*(.*?)\*\*/$1/;
                        $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                        $desc = dewikify($wikitype, $desc);

                        $str .= ".TP\n";
                        $str .= ".I $name\n";
                        $str .= "$desc\n";
                    } else {
                        last;  # we seem to have run out of table.
                    }
                }
            } else {
                die("write me");
            }
        }

        if (defined $returns) {
            $str .= ".SH RETURN VALUE\n";
            $str .= dewikify($wikitype, $returns) . "\n";
        }

        if (defined $examples) {
            $str .= ".SH CODE EXAMPLES\n";
            $dewikify_manpage_code_indent = 0;
            $str .= dewikify($wikitype, $examples) . "\n";
            $dewikify_manpage_code_indent = 1;
        }

        if (defined $threadsafety) {
            $str .= ".SH THREAD SAFETY\n";
            $str .= dewikify($wikitype, $threadsafety) . "\n";
        }

        if (defined $version) {
            $str .= ".SH AVAILABILITY\n";
            $str .= dewikify($wikitype, $version) . "\n";
        }

        if (defined $related) {
            $str .= ".SH SEE ALSO\n";
            # !!! FIXME: lots of code duplication in all of these.
            my $v = dewikify($wikitype, $related);
            my @desclines = split /\n/, $v;
            my $nextstr = '';
            foreach (@desclines) {
                s/\A(\:|\* )//;
                s/\(\)\Z//;  # Convert "SDL_Func()" to "SDL_Func"
                s/\[\[(.*?)\]\]/$1/;  # in case some wikilinks remain.
                s/\[(.*?)\]\(.*?\)/$1/;  # in case some wikilinks remain.
                s/\A\*\s*\Z//;
                s/\A\/*//;
                s/\A\.BR\s+//;  # dewikify added this, but we want to handle it.
                s/\A\.I\s+//;  # dewikify added this, but we want to handle it.
                s/\A\s+//;
                s/\s+\Z//;
                next if $_ eq '';
                $str .= "$nextstr.BR $_ (3)";
                $nextstr = ",\n";
            }
            $str .= "\n";
        }

        if (0) {
            $str .= ".SH COPYRIGHT\n";
            $str .= "This manpage is licensed under\n";
            $str .= ".UR https://creativecommons.org/licenses/by/4.0/\n";
            $str .= "Creative Commons Attribution 4.0 International (CC BY 4.0)\n";
            $str .= ".UE\n";
            $str .= ".PP\n";
            $str .= "This manpage was generated from\n";
            $str .= ".UR $wikiurl/$fn\n";
            $str .= "${projectshortname}'s wiki\n";
            $str .= ".UE\n";
            $str .= "using SDL/build-scripts/wikiheaders.pl";
            $str .= " revision $gitrev" if $gitrev ne '';
            $str .= ".\n";
            $str .= "Please report issues in this manpage at\n";
            $str .= ".UR $bugreporturl\n";
            $str .= "our bugtracker!\n";
            $str .= ".UE\n";
        }

        my $path = "$manpath/$_.3.tmp";
        open(FH, '>', $path) or die("Can't open '$path': $!\n");
        print FH $str;
        close(FH);
        rename($path, "$manpath/$_.3") or die("Can't rename '$path' to '$manpath/$_.3': $!\n");
    }
}
# end of wikiheaders.pl ...