pax_global_header00006660000000000000000000000064116060730230014510gustar00rootroot0000000000000052 comment=529c81d015daac919a246970857c0a9be81869dc prophet-0.750/000077500000000000000000000000001160607302300132045ustar00rootroot00000000000000prophet-0.750/Changes000066400000000000000000000100011160607302300144670ustar00rootroot000000000000000.750 * --progress option to publish command enables rsync's publish flag (Lance Wicks) * PROPHET_EDITOR variable allows customization of editor (Rob Hoelz) * add the ability to color format strings (Christine Spang) * allow one to configure the default server port with server.default-port (Christine Spang) * server command now bails out with a helpful message when port in use (Christine Spang) * don't blindly override $ENV{LESS} or $ENV{MORE} (Jesse Vincent) * various performance improvements (Jesse Vincent) 0.743 * Fix test failures so install works again -- spang 0.742 * Deal with Path::Dispatcher::Declarative being split from P::D --jesse 0.741 * Actually ship all the javascript and css files for the web frontend. (Christine Spang) * bash and zsh completion, see doc/tab-completion for how to enable (Shawn Moore, Kevin Falcone) 0.74 User-visible highlights for this release (not all commits are listed here): * Solve double-prompting for username/password in foreign syncs - Christine Spang * Fixes for Moose compatibility - Christine Spang * Unbreak with Mouse > 0.40 - Florian Ragwitz * Some code from SD had accidentially snuck into prophet. rafl++ for alerting me - Jesse Vincent * make menu links relative - Jesse Vincent * propagate "server" into child menus - Jesse Vincent * Announce project_name via Bonjour - Pedro Melo * allow apps to skip overriding dispatcher_class - Ruslan Zakirov * return that file doesn't exist only when lwp_get returns undef - Ruslan Zakirov * Add tab completion for prophet shell - Shawn Moore * check and store in config username and secret token - franck cuny * add --as to clone - franck cuny Thanks to the following people who contributed to this release: Alex Vandiver, Christine Spang, Florian Ragwitz, Jesse Vincent, Pedro Melo, Ruslan Zakirov, Shawn Moore, and franck cuny. 0.73 * Reimplement alias expansion in terms of lists of argument words. - Nelson Elhage * First pass at improving UTF8 output in static web views - Jesse Vincent 0.72 Fri Sep 4 13:20:16 EDT 2009 * fix sqlite replica: original_sequence_no can be 0 - sunnavy * add inc/ back, we should keep it in repo - sunnavy * Added a couple debugging tools - dump_changesets.idx dump_record.idx - Jesse Vincent * Prophet::CLI::RecordCommand now checks to make sure you've asked it to operate on a record that actually exists. - Jesse Vincent * Added a Prophet::Record API for "does this exist?" - Jesse Vincent * fixing old docs that were out of date - Jesse Vincent * Made sure that sqlite replicas userdata keys are always lowercase - Jesse Vincent * Made an "is this replica me?" query case insensitive. - Jesse Vincent * Prophet::App now has a friendly name for "was asked to characterize an undef replica" - Jesse Vincent * Prophet::FilesystemReplia's local metadata is now case insensitive - Jesse Vincent * Failing tests proving that local metadata isn't case insensitive - Jesse Vincent * more notes to the long and ugly alias value stuff parser - sunnavy * better alias value parse: to handle ' and " - sunnavy * add aliases test with quotes - sunnavy * Support var-args aliases. - Nelson Elhage * Improve argument expansion in aliases. 
- Nelson Elhage * Only expand aliases on word boundaries. - Nelson Elhage * Remove an unnecessary 'no strict "refs"'. - Nelson Elhage * Switch to UUID::Tiny 1.02 and remove our temporary fork - Christine Spang * Reload config after editing (needed in shell) - Christine Spang * Better error message when pushing to non-existant replica - Christine Spang * Web templates now default to utf8 - Jesse Vincent 0.71 Sat Aug 29 23:30:09 EDT 2009 Added --local to prophet clone: list local Bonjour sources - Pedro Melo Extract out code for the filesystem replica backends so we can implement an ssh personality for them. 0.70 - 2009-08-26 * Performance improvements for pull-over-HTTP * Small documentation, error message and warnings cleanups 0.69_01 - 2009-08-21 * Initial release - dev release to do CPAN smoking before official release prophet-0.750/MANIFEST000066400000000000000000000135351160607302300143440ustar00rootroot00000000000000bin/dump_changesets.idx bin/dump_record.idx bin/prophet bin/run_test_yml.pl bin/taste_recipe Changes doc/foreign-replicas doc/glossary doc/jesse_test_env_setup doc/luid doc/merging-and-conflicts doc/notes-on-merging doc/repository-layout doc/tab-completion doc/todo doc/web_form_handling etc/prophet.bash etc/prophet.zsh inc/Module/AutoInstall.pm inc/Module/Install.pm inc/Module/Install/AutoInstall.pm inc/Module/Install/Base.pm inc/Module/Install/Can.pm inc/Module/Install/Fetch.pm inc/Module/Install/Include.pm inc/Module/Install/Makefile.pm inc/Module/Install/Metadata.pm inc/Module/Install/Share.pm inc/Module/Install/Win32.pm inc/Module/Install/WriteAll.pm lib/Prophet.pm lib/Prophet/App.pm lib/Prophet/Change.pm lib/Prophet/ChangeSet.pm lib/Prophet/CLI.pm lib/Prophet/CLI/CollectionCommand.pm lib/Prophet/CLI/Command.pm lib/Prophet/CLI/Command/Aliases.pm lib/Prophet/CLI/Command/Clone.pm lib/Prophet/CLI/Command/Config.pm lib/Prophet/CLI/Command/Create.pm lib/Prophet/CLI/Command/Delete.pm lib/Prophet/CLI/Command/Export.pm lib/Prophet/CLI/Command/History.pm lib/Prophet/CLI/Command/Info.pm lib/Prophet/CLI/Command/Init.pm lib/Prophet/CLI/Command/Log.pm lib/Prophet/CLI/Command/Merge.pm lib/Prophet/CLI/Command/Mirror.pm lib/Prophet/CLI/Command/Publish.pm lib/Prophet/CLI/Command/Pull.pm lib/Prophet/CLI/Command/Push.pm lib/Prophet/CLI/Command/Search.pm lib/Prophet/CLI/Command/Server.pm lib/Prophet/CLI/Command/Settings.pm lib/Prophet/CLI/Command/Shell.pm lib/Prophet/CLI/Command/Show.pm lib/Prophet/CLI/Command/Update.pm lib/Prophet/CLI/Dispatcher.pm lib/Prophet/CLI/Dispatcher/Rule.pm lib/Prophet/CLI/Dispatcher/Rule/RecordId.pm lib/Prophet/CLI/MirrorCommand.pm lib/Prophet/CLI/Parameters.pm lib/Prophet/CLI/ProgressBar.pm lib/Prophet/CLI/PublishCommand.pm lib/Prophet/CLI/RecordCommand.pm lib/Prophet/CLI/TextEditorCommand.pm lib/Prophet/CLIContext.pm lib/Prophet/Collection.pm lib/Prophet/Config.pm lib/Prophet/Conflict.pm lib/Prophet/ConflictingChange.pm lib/Prophet/ConflictingPropChange.pm lib/Prophet/ContentAddressedStore.pm lib/Prophet/DatabaseSetting.pm lib/Prophet/FilesystemReplica.pm lib/Prophet/ForeignReplica.pm lib/Prophet/Manual.pod lib/Prophet/Meta/Types.pm lib/Prophet/PropChange.pm lib/Prophet/Record.pm lib/Prophet/Replica.pm lib/Prophet/Replica/file.pm lib/Prophet/Replica/FS/Backend/File.pm lib/Prophet/Replica/FS/Backend/LWP.pm lib/Prophet/Replica/FS/Backend/SSH.pm lib/Prophet/Replica/http.pm lib/Prophet/Replica/prophet.pm lib/Prophet/Replica/prophet_cache.pm lib/Prophet/Replica/sqlite.pm lib/Prophet/ReplicaExporter.pm lib/Prophet/ReplicaFeedExporter.pm lib/Prophet/Resolver.pm 
lib/Prophet/Resolver/AlwaysSource.pm lib/Prophet/Resolver/AlwaysTarget.pm lib/Prophet/Resolver/Failed.pm lib/Prophet/Resolver/Fixup/MissingSourceOldValues.pm lib/Prophet/Resolver/FromResolutionDB.pm lib/Prophet/Resolver/IdenticalChanges.pm lib/Prophet/Resolver/Prompt.pm lib/Prophet/Server.pm lib/Prophet/Server/Controller.pm lib/Prophet/Server/Dispatcher.pm lib/Prophet/Server/View.pm lib/Prophet/Server/ViewHelpers.pm lib/Prophet/Server/ViewHelpers/Function.pm lib/Prophet/Server/ViewHelpers/HiddenParam.pm lib/Prophet/Server/ViewHelpers/ParamFromFunction.pm lib/Prophet/Server/ViewHelpers/Widget.pm lib/Prophet/Test.pm lib/Prophet/Test/Arena.pm lib/Prophet/Test/Editor.pm lib/Prophet/Test/Participant.pm lib/Prophet/Util.pm lib/Prophet/UUIDGenerator.pm lib/Prophet/Web/Field.pm lib/Prophet/Web/FunctionResult.pm lib/Prophet/Web/Menu.pm lib/Prophet/Web/Result.pm Makefile.PL MANIFEST This list of files MANIFEST.SKIP META.yml share/web/static/jquery/css/indicator.gif share/web/static/jquery/css/jquery.autocomplete.css share/web/static/jquery/css/superfish-navbar.css share/web/static/jquery/css/superfish-vertical.css share/web/static/jquery/css/superfish.css share/web/static/jquery/css/tablesorter/asc.gif share/web/static/jquery/css/tablesorter/bg.gif share/web/static/jquery/css/tablesorter/desc.gif share/web/static/jquery/css/tablesorter/style.css share/web/static/jquery/images/arrows-cccccc.png share/web/static/jquery/images/arrows-ffffff.png share/web/static/jquery/images/shadow.png share/web/static/jquery/js/hoverIntent.js share/web/static/jquery/js/jquery-1.2.6.min.js share/web/static/jquery/js/jquery-autocomplete.js share/web/static/jquery/js/jquery.bgiframe.min.js share/web/static/jquery/js/jquery.tablesorter.min.js share/web/static/jquery/js/pretty.js share/web/static/jquery/js/superfish.js share/web/static/jquery/js/supersubs.js share/web/static/yui/css/reset.css t/aliases.t t/aliases_with_quotes.t t/canonicalize.t t/cli-arg-parsing.t t/cli-arg-translation.t t/cli.t t/config.t t/create-conflict.t t/create.t t/data/aliases.tmpl t/data/settings-first.tmpl t/data/settings-second.tmpl t/data/settings-third.tmpl t/database-settings.t t/default.t t/delete-delete-conflict.t t/edit.t t/error.t t/export.t t/generalized_sync_n_merge.t t/history.t t/info.t t/init.t t/lib/TestApp/Bug.pm t/lib/TestApp/BugCatcher.pm t/lib/TestApp/Bugs.pm t/lib/TestApp/ButterflyNet.pm t/local_metadata.t t/log.t t/luid.t t/malformed-url.t t/non-conflicting-merge.t t/publish-html.t t/publish-pull.t t/real-conflicting-merge.t t/record-types.t t/references.t t/res-conflict-3.t t/resty-server.t t/search.t t/Settings/bin/settings t/Settings/lib/App/Settings.pm t/Settings/lib/App/Settings/Bug.pm t/Settings/lib/App/Settings/CLI.pm t/Settings/lib/App/Settings/Test.pm t/Settings/t/database-settings-editor.t t/Settings/t/sync-database-settings.t t/simple-conflicting-merge.t t/simple-push.t t/sync-change-to-original-source.t t/sync-delete-conflict.t t/sync-ticket.t t/sync_3party.t t/test_app.conf t/usage.t t/use.t t/util.t t/validate.t t/validation.t t/WebToy/bin/webtoy t/WebToy/lib/App/WebToy.pm t/WebToy/lib/App/WebToy/CLI.pm t/WebToy/lib/App/WebToy/Collection/WikiPage.pm t/WebToy/lib/App/WebToy/Model/WikiPage.pm t/WebToy/lib/App/WebToy/Server/Dispatcher.pm t/WebToy/lib/App/WebToy/Server/View.pm xt/01-dependencies.t xt/99-pod-coverage.t xt/99-pod.t prophet-0.750/MANIFEST.SKIP000066400000000000000000000001361160607302300151020ustar00rootroot00000000000000~$ .tmp$ .svn$ .git/* .bak$ ^blib/ pm_to_blib .gitignore ^Makefile(?:\.old)?$ 
.prove .shipit$ prophet-0.750/META.yml000066400000000000000000000023471160607302300144630ustar00rootroot00000000000000--- abstract: ~ author: - 'clkao and jesse' build_requires: ExtUtils::MakeMaker: 6.42 Test::Exception: 0.26 configure_requires: ExtUtils::MakeMaker: 6.42 distribution_type: module generated_by: 'Module::Install version 0.99' license: mit meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: 1.4 name: Prophet no_index: directory: - inc - share - t - xt requires: Any::Moose: 0.04 Config::GitLike: 1.02 DBD::SQLite: 1 DBI: 1 Digest::SHA: 0 Exporter::Lite: 0 ExtUtils::MakeMaker: 6.11 File::ShareDir: 1.00 HTTP::Date: 0 HTTP::Server::Simple: 0.40 IPC::Run3: 0 JSON: 2.00 JSON::XS: 2.2222 LWP::UserAgent: 0 MIME::Base64::URLSafe: 0 Module::Pluggable: 0 Module::Refresh: 0 Mouse: 0.21 Net::Bonjour: 0 Params::Validate: 0 Path::Dispatcher: 0.14 Path::Dispatcher::Declarative: 0 Proc::InvokeEditor: 0 Template::Declare: 0.35 Term::ReadKey: 0 Term::ReadLine::Perl: 0 Test::Exception: 0.26 Test::HTTP::Server::Simple: 0 Test::Pod::Coverage: 0 Test::WWW::Mechanize: 1.16 Time::Progress: 0 URI: 0 UUID::Tiny: 1.02 XML::Atom::SimpleFeed: 0 YAML::Syck: 0 resources: license: http://opensource.org/licenses/mit-license.php version: 0.743 prophet-0.750/Makefile.PL000066400000000000000000000040361160607302300151610ustar00rootroot00000000000000#!/usr/bin/perl use inc::Module::Install; name('Prophet'); # App::Settings App::Settings::CLI App::WebToy App::WebToy::CLI author('clkao and jesse'); license('mit'); requires('Exporter::Lite'); requires('Params::Validate'); requires('IPC::Run3'); requires('UUID::Tiny' => '1.02'); requires('Digest::SHA'); requires('LWP::UserAgent'); # LWP::ConnCache too requires('URI'); requires('HTTP::Date'); requires( 'JSON' => '2.00' ); requires('Module::Pluggable'); requires('Proc::InvokeEditor'); requires( 'Any::Moose' => '0.04' ); requires( 'Mouse' => '0.89' ); requires('XML::Atom::SimpleFeed'); requires( 'Path::Dispatcher' => '1.02' ); requires( 'Path::Dispatcher::Declarative' => '0.03' ); requires('Time::Progress'); requires('Config::GitLike' => '1.02'); requires('MIME::Base64::URLSafe'); if ( $^O =~ /MSWin/ ) { requires( 'Win32' ); } build_requires( 'Test::Exception' => '0.26' ); use Term::ReadLine; # if we don't do this, ::Perl fails feature 'Improved interactive shell' => -default => 1, 'Term::ReadLine::Perl' => 0; feature 'Faster JSON Parsing' => -default => 1, 'JSON::XS', => '2.2222'; feature 'Web server' => -default => 1, 'File::ShareDir' => '1.00', 'HTTP::Server::Simple' => '0.40', # HTTP::Server::Simple::CGI ; feature 'HTML display' => -default => 1, 'Template::Declare' => '0.35', # Template::Declare::Tags ; feature 'Foreign replica support' => -default => 1, 'Term::ReadKey'; feature 'SQLite replica support' => -default => 1, 'DBI' => 1, 'DBD::SQLite' => 1; feature 'Maintainer testing tools' => -default => 0, 'Test::HTTP::Server::Simple', 'YAML::Syck' => 0, 'Module::Refresh' => 0, 'Test::WWW::Mechanize' => '1.16', 'Test::Pod::Coverage'; feature 'Bonjour support' => -default => 0, 'Net::Bonjour', # Net::Rendezvous::Publish ; tests('t/*.t t/*/t/*.t'); all_from('lib/Prophet.pm'); install_share 'share'; auto_install; WriteAll(); prophet-0.750/bin/000077500000000000000000000000001160607302300137545ustar00rootroot00000000000000prophet-0.750/bin/dump_changesets.idx000066400000000000000000000006101160607302300176300ustar00rootroot00000000000000use Prophet::CLI; use Prophet::FilesystemReplica; my $cli = Prophet::CLI->new(); my $file = 
Prophet::Util->slurp(shift); my $fsr = Prophet::FilesystemReplica->new(app_handle => $cli->app_handle); for (1..(length($file)/Prophet::FilesystemReplica::CHG_RECORD_SIZE)) { my $result = $fsr->_changeset_index_entry(sequence_no => $_, index_file => \$file); print join("\t",@$result)."\n"; } prophet-0.750/bin/dump_record.idx000066400000000000000000000003521160607302300167650ustar00rootroot00000000000000use Prophet::CLI; use Prophet::FilesystemReplica; my $cli = Prophet::CLI->new(); my $type = shift; my $uuid = shift; my @result = $cli->handle->_read_record_index(type => $type, uuid => $uuid); warn YAML::Dump(\@result); use YAML; prophet-0.750/bin/prophet000077500000000000000000000001761160607302300153670ustar00rootroot00000000000000#!/usr/bin/env perl use warnings; use strict; use Prophet::CLI; my $cli = Prophet::CLI->new(); $cli->run_one_command(@ARGV); prophet-0.750/bin/run_test_yml.pl000066400000000000000000000006011160607302300170320ustar00rootroot00000000000000#!/usr/bin/env perl -w use strict; use Prophet::Test::Arena; Prophet::Test::Arena->run_from_yaml; =head1 NAME run_test_yml - rerun recorded test =head1 SYNOPSIS prove -l t/generalized_sync_n_merge.t perl -Ilib bin/run_test_yml.pl RECORDED-TEST.yml =head1 DESCRIPTION You can also copy this file to a .t file, and append the yml content into the C<__DATA__> section. =cut prophet-0.750/bin/taste_recipe000066400000000000000000000012621160607302300163470ustar00rootroot00000000000000#!/usr/bin/env perl use strict; my $script = 'bin/run_test_yml.pl'; my $file = shift; use YAML::Syck; my $d = YAML::Syck::LoadFile( $file ); use Storable 'dclone'; use Data::Dumper; use File::Temp; for (reverse 0..$#{ $d->{recipe}} ) { warn "==> removing $_ ".Dumper($d->{recipe}->[$_]); next if $d->{recipe}->[$_][1] eq 'create_record'; my $foo = dclone $d; splice(@{$foo->{recipe}}, $_, 1); my $tmpf = File::Temp->new; YAML::Syck::DumpFile($tmpf, $foo); system('perl', '-Ilib', $script, $tmpf); if ($?) { $d = $foo; } warn "result : $?"; } warn "streamlined recipe at $file.new"; YAML::Syck::DumpFile( "$file.new", $d); prophet-0.750/doc/000077500000000000000000000000001160607302300137515ustar00rootroot00000000000000prophet-0.750/doc/foreign-replicas000066400000000000000000000072211160607302300171270ustar00rootroot00000000000000 =head1 Resolutions Resolutions are stored in a separate database because they're supposed to be propagated _like_ regular changesets but always sent before the regular changesets. =head1 Native Replicas =head2 Merge tickets =head1 Foreign Replicas A foreign replica is a (possibly read-only) data store that is not a Prophet replica (such as RT or Twitter). A Prophet replica can act as a gateway to a foreign replica. Because we can't store arbitrary metadata in foreign replicas, we do not yet support arbitrary topology sync of foreign replicas. A single Prophet-backed replica must act as a gateway to a foreign replica. It would be great if Prophet could, in a general way, allow arbitrary topology sync of foreign replicas, but it was never a goal. Foreign replicas never talk directly with each other. Their communications are always intermediated by a Prophet replica. The design wasn't such that you could have multiple replicas gatewaying transactions between a pair of foreign replicas. Foreign replicas aren't really full-fledged replicas; they piggyback on another replica on the proxying host to store metadata about merges, conflicts and local id (luid) to uuid mappings.
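Purely as an illustration (hypothetical names and layout, not Prophet's actual storage format), the metadata a gateway replica keeps on behalf of one foreign replica might look roughly like this:

  # Hypothetical sketch only -- names and structure are illustrative,
  # not Prophet's real on-disk format.
  use strict;
  my $foreign_uuid = 'HIVEMINDER-UUID';
  my %gateway_metadata = (
      # high-water mark: last foreign changeset already merged from there
      merge_ticket => { $foreign_uuid => 3 },
      # short local ids (luids) mapped back to record uuids
      luid_to_uuid => { 7 => 'B900A5B8-2322-11DD-A835-2B9E427B83F6' },
  );

The merge tickets that this piggybacked metadata contains are described in more detail below.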
When working with Foreign Replicas, the local state handle tracks data on behalf of the foreign database using merge tickets. Our merge tickets work like svk's: they're a high-water mark of "the most recent transaction merged from some other replica", keyed by the replica uuid of that other replica. Prophet always merges all transactions from a replica sequentially. So when Bob is pushing to a foreign replica, we use metadata stored in Bob's replica to interact with the foreign replica. _merge_ticket records are an example of this; however, when you do a push to a foreign replica, it should be storing that transaction as merged (see App::SD::ForeignReplica::record_pushed_transaction). The test that's failing is: Bob pulls a task from HM and then pushes to RT. RT never gets the HM task. The specific problem I'm seeing is when Bob pushes to RT, RT needs to know what the high water mark from Hiveminder is. Because RT doesn't have a full replica, it ends up accidentally using Bob's merge tickets, as exemplified by these two adjacent lines in the logfile:

  Checking metadata in BOB-UUID: (_merge_tickets, HIVEMINDER-UUID, last-changeset) -> 3
  RT-UUID's last_changeset_from_source(HIVEMINDER-UUID) -> 3

I think state_handle should be an entirely separate replica, just as resolutions are. But it should never be propagated. Can't it be a replica we just don't propagate? So far, your description doesn't give me any reason to think that ending up with an explicitly separate state database would improve anything, and it would add more moving parts. We're being bitten by reusing the Prophet replica's records: if the foreign replica had its own replica, then there would be no overlap and this issue would just go away. The foreign replica is using the real replica's _merge_ticket records. I _believe_ that our state handle stuff should entirely replace the need to even use those. Merge tickets are "most recent changeset seen from replica ABC". Those are generally useful to propagate around, except in the case of the foreign replica, where it only ever matters what the most recent local changeset we've pushed to the foreign replica is (pulling from an FR should, I believe, use regular merge tickets). =head1 Open issues Prophet::ForeignReplica should probably be subclassing the bits of code that deal with MergeTickets. Also, apparently "merge tickets" is a horrible name that confuses people; it may want renaming. prophet-0.750/doc/glossary000066400000000000000000000067231160607302300155470ustar00rootroot00000000000000=head1 FALLACIES OF DISTRIBUTED COMPUTING The Fallacies of Distributed Computing are a set of common but flawed assumptions made by programmers when first developing distributed applications. The fallacies are summarized as follows: =over =item 1. The network is reliable. =item 2. Latency is zero. =item 3. Bandwidth is infinite. =item 4. The network is secure. =item 5. Topology doesn't change. =item 6. There is one administrator. =item 7. Transport cost is zero. =item 8. The network is homogeneous. =back =head1 GLOSSARY =head2 Database A term to describe a uniquely identified set of object types and records sharing a single 'base' revision and Replica identifier A database contains multiple Records Has one: uuid =head2 Replica An instance of a database.
Replicas are expected to contain all Changesets from any other replica they have been synchronized with, but those Changesets are not guaranteed to be in the same sequence on each replica Has one: uuid =head2 Changeset A changeset contains "old" and "new" versions of a set of database "Records", they can be of any Record Type. Has one: source_uuid, sequence_no Has many: changes =head2 Change An entry in a changeset. Contains all the updates to a given record for this changeset =head2 Record A Record is composed of zero or more Attributes and a universally unique identifier. Each record is categorized into a Record Type. Has one: uuid Has many: attributes =head2 Record Type A Record Type is a category or "bucket" for zero or more records applications may define specific behaviours for Records of a certain Record Type, but Prophet does no more than to tag Records with a Record Type. Record Types are uniquely identified with a textual name and a UUID Has one: textual label, uuid =head2 Attribute A key-value pair on a Record. Has one: key, value =head2 Conflict A Conflict occurs when a Changeset is being applied and the current state of a Replica meets any of the following criteria: =over 4 =item The Replica already contains a record marked as "created" in the changeset =item The Replica doesn't contain a record marked as "deleted" in the changeset =item The Replica doesn't contain a record marked as "updated" in the changeset =item The Replica contains a record marked as "updated" in the changeset, but the current state of the properties on the record does not match the "old" state of the record in the changeset. =back =head2 Resolution When the local Replica =head2 Export/Publish Exporting is the act of cloning a replica. The replica uuid (which is its identity) remains the same. An exported replica is useful for copying to another machine or USB stick, so that other people may pull your changes. Publishing a replica just export it to another machine, usually over ssh. Exporting or publishing to an existing replica just updates that replica, it does not perform a merge, since the replica uuid is the same for both copies. =head2 Push/Pull Pushing and pulling merge changes from two different replicas. Pushing to or pulling from an empty replica creates it and assigns it a new replica uuid. =head2 Foreign Replica A (possibly read-only) data store that is not a Prophet replica (such as RT or Twitter). A Prophet replica can act as a gateway to a foreign replica. =head1 NON-TERMS =head2 Node Too many things get called records =head2 revision Revision is a term from version control. the backing store may describe it but it's not a Prophet term prophet-0.750/doc/jesse_test_env_setup000066400000000000000000000002621160607302300201340ustar00rootroot00000000000000export PATH=/usr/bin:$PATH export PERL5LIB=/Users/jesse/svk/rt-3.8/lib export JIFTY_APP_ROOT=/Users/jesse/svk/hiveminder-trunk/ export RT_DBA_USER=root export RT_DBA_PASSWORD='' prophet-0.750/doc/luid000066400000000000000000000015421160607302300146330ustar00rootroot00000000000000GUIDs are not great to work with. "B900A5B8-2322-11DD-A835-2B9E427B83F6" is a lot to demand of a user. And god forbid they have to actually type in that meaningless string. Substring matching doesn't help much because two GUIDs can easily differ by only a bit. Instead, we give users local IDs for each record. So instead of "B900A5B8-2322-11DD-A835-2B9E427B83F6" they might get "7". 
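A minimal sketch of the idea (hypothetical code, not the actual Prophet implementation): hand out the next small integer the first time a uuid is seen, and remember the mapping in both directions.

  # Hypothetical illustration of luid allocation; the real mapping lives in
  # the replica's local-id cache described below.
  use strict;
  my %luid_for_uuid;              # uuid -> small integer
  my @uuid_for_luid = (undef);    # luid -> uuid; slot 0 reserved so luids start at 1

  sub luid_for {
      my $uuid = shift;
      unless ( exists $luid_for_uuid{$uuid} ) {
          push @uuid_for_luid, $uuid;
          $luid_for_uuid{$uuid} = $#uuid_for_luid;
      }
      return $luid_for_uuid{$uuid};
  }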
Local IDs are local to a replica - so two users can have different records with local ID "7". Because local IDs are integers, they're always distinguishable from global IDs. Local IDs are mildly fleeting. They're contained in a single file (directory?) which may be removed at any time. Every time a record is loaded, it's given a local ID which is cached so the user may use it. The local ID -> global ID mapping is contained in the $replica/local-id-cache file (directory?). prophet-0.750/doc/merging-and-conflicts000066400000000000000000000132361160607302300200530ustar00rootroot00000000000000 Prophet has the concept of a database and a replica a database is the global state so jesse and I both have copies of the sd buglist, that's the same database you can pull to and from anyone with the same database uuid. you can pull from someone with a different database uuid if you say --force but that may get sketchy replicas are an individual instance of the database however, there can be multiple copies of the same replica I have on my laptop a replica of the sd buglist and on my webserver (sartak.org/misc/sd) I have the same replica it's the same instance, just different copies. you could have a copy on a thumbdrive or whatever *nod* it's a fatal error if you try to merge two replicas with the same replica uuid you can only copy the replica wholesale (we call it "export") export keeps the same replica uuid if you merge between a replica and the empty replica, that is how you create a replica with the same database uuid and different replica uuid so to get a new replica (say you want to start tracking sd's bugs) you must do a merge. "sd pull --from url" when you have an empty database will do exactly this. so in the abstract replica is a set of record UUIDs and record data and record history? I'd say more that a replica is an ordered collection of changesets or is it more like darcs (the set of changes) we store the current state of each record for efficiency :) but when we merge we don't look at that region of the replica at all the changesets are not mutable (i'm guessing ;-) correct one common technique is to cache the tip of a shadow paging view but if you completely zap the record set you can always reconstruct it right! okay, so we're definitely more about changesets than records onto conflicts there are like four possible conflicts two changesets that create the same record uuid (rare, I think this is only when we have that astronomically improbable uuid collision) a changeset that deletes a record we don't have a changeset that updates a record we don't have a changeset that updates a record, but the old value isn't our current value "value" being the whoe record? or a cell? a property a property is a hash key/value :) *nod* but if the key is different then it's more the "we don't have" case, no? "we have too much" you create a record, I pull it you set its name property to yuval I set its name property to shawn I pull from you oh record == one key value pair? a record is a hash i thought record == one set of key value pairs it's a set, right when I pull from you, I get a "change uuid BLAH's ircer property from undef to yuval" but that's a conflict because my BLAH's ircer is shawn *nod* it's a really simple system yeah so how does all the self healing mumbo jumbo work ;-) all I know about that is, when I pull from you before I pull any changesets I pull all of your resolutions resolution is a type of changeset? yes a special type of changeset *nod* (its is_resolution attribute is 1! 
:)) are changesets dependent? nope not even theory of patches dependent? nope clarification: they *are* dependent in that when you pull changeset X from somebody, you will have changesets 1, 2, ..., X-1 already applied. but there are no explicit dependencies. they have a numeric id k resolutions are so weird that we have to keep a second replica inside your real replica just to keep track of everything aren't they just a CAS of conflicts -> changesets? theoretically? probably this is really shaky territory for me :) ok if you ask obra, ask him in email or irc so I can learn too :) so the self healing magic is that if you are going to have a conflict and the same identical conflict already has a resolution then you get the resolution too ? yes instead of one resolution there can actually be many different resolutions we choose whichever one occurred most okay how are conflicts compared for equality? just on data? or also on metadata? now I suddenly know a lot more about conflict resolution ;) looks like just data http://code.bestpractical.com/bps-public/Prophet/trunk/lib/Prophet/Resolver/FromResolutionDB.pm it's a page of code the conflict resolution engine is pluggable. when you use the command line, we prompt by default, though that can be changed so resolutions are retargetable to new data with the same type of conflict? e.g. if we have two human objects I hope not * nothingmuch too ;-) I'm pretty sure the resolution knows which exactly changesets it's resolving ah, ok so the self healing buzz is mostly about which resolution to choose? yep k * nothingmuch is much calmer now ;-) I'm also very happy it's not as complex as I feared this is all pretty sensible prophet-0.750/doc/notes-on-merging000066400000000000000000000245021160607302300170670ustar00rootroot00000000000000Wikipedia Fallacies_of_Distributed_Computing The Fallacies of Distributed Computing are a set of common but flawed assumptions made by programmers when first developing distributed applications. The fallacies are summarized as follows [1]: 1. The network is reliable. 2. Latency is zero. 3. Bandwidth is infinite. 4. The network is secure. 5. Topology doesn't change. 6. There is one administrator. 7. Transport cost is zero. 8. The network is homogeneous. Notes on conflict Initial state: clkao: record 1. foo: bar (clkao@1) merge -> jesse clkao: record 1 foo:bar (@1) jesse: record 1 foo:bar (@1) (merged from: clkao@1) clkao: changes record 1: foo bar->baz (@2) jesse: changes record 1: foo bar->frotz (@2) Current state: clkao: record 1@2. foo: baz jesse: record 1@2. foo: frotz (right so far?) merge: clkao -> jesse replay clkao@2 (first new rev) update record 1: change foo from bar->baz CONFLICT! (jesse's proposed merge algorithm) pre-fixup: record 1, foo: revert frotz to bar jesse@3 apply clkao@2: record 1, foo: bar->baz conflict resolution: baz vs frotz in the case that they were the same, resolve in favor of (always pick local?) but they're not the same, so we: * look for a pre-existing resolution between clkao@2 and jesse@2? record 1, bar->baz AND record 1, bar->frotz? ya, that's because frotz does't match bar, not because frotz doesn't match baz. if the two changesets make the same change, it's still a conflict which is.. normally resolved as "hey, we have the same change" ya ok let me try to write out what I was thinking for merge here? or skip it and keep going with this as our definition of conflict? now I'll shut up. I feel like this is at the edges of my grasp of distributed systems. 
the thing is, the resolution strategy has to be in the db or be predefined. otherwise different replica can get different stuff. or is this going to be allowed? or if so we should layer it ontop as local changes to be applied to this replica, which should also be based on the head of the Database. so when merge, we either resolve conflict making the change gone, or keep it local on top of the Database. right, but not all replicas want that particular resolution, no? so World will know all changes, except in replica A it's using X, in replica B it's using Y after the conflict Z ? ok. so your resolution is a new changeset that overrides the one conflicts with you. A and B conflcit on X, resolved as Y A and C conflict on X, resolved as Z how about two variants of conflict resolution? as they are parallel. who is to decide Y and Z? In your example, do you assume the merges take place on B and C in parallel but neither feeds back to A yet? Assuming so, B@post-conflict = Y C@post-conflict= Z B merges to A. A ends up with resolution as Y C merges to A. There is a conflict between Y and Z. Presumably that last update wins. next time you sync A->B, B gets told that Y beat Z what if B<-> C sync before A<->B and A<->C sync. in the worst case, assume they decide on "Y beats Z" Do we need a 4th record for the pessimal case here? Then A<->C sync. I guess the main problem is "who is deciding", the one that initiates the sync? practically yes, because you can publish a resolution based on the latest HEAD of the others. so there's no "they decide", it's up to who syncs the other first? fwiw, I believe we're running into the byzantine generals problem. http://en.wikipedia.org/wiki/Paxos_algorithm But those require knowing how many replicas you have, don't they? so, how do we stabilize the system? hm. I wonder if there's a way to publish a list of "whom you trust in in the face of conflicts" and to have that be a higher-level function. that feels sort of evil and wrong, though the thing we're trying to avoid is ping-pong. and we can detect the ping-pong reliably, right? i don't think there'll ping-pong in our case, because the one who merges always wins (not that his version, but he gets to decide and publish), the resolution is a change based on the latest version of the other party, so the next sync will work without conflict and because we record all previous merge decisions, even if there's a long loop of other replicas off in the wilderness, when they come back, we'll still know what we decided originally? right, you get to "insist on" your change, but that's supposedly not a conflict, but of higher level functionality, which might cause ping-pong, but not in the db sync layer ok. I need to be @client in 8 hours. I should probably go sleep. if this is interesting enough for you to hack on, the bits I stalled out on code wise were: * storing merges * actually beating SVN::Ra into giving me revision history (no easy-to-decode examples) get_logs(), it's quita painful. I noticed * bin/merger had my state ok. i will do customer stuff first next hour, and see how much i can get much appreciated :) I'll commit this file enjoy the hacking. ->irc but then when X and Y sync, there will be conflict AA. and they'll need to resolve. it should not be possible to fork. conflicting Database HEAD is still a Conflict and when you sync, you'll propagate your resolutions. hm. my vision had the resolution as something that got propagated. 
but when calcualating future conflicts, treating the resolution and the Every replica needs to be eventually consistent. if you go back and forth, eventually, someone will win and someone will lose. but the goal is for us to end up with one current worldview when all is said and done. I think we need to allow for users to make conflicting, distributed resolution decisions. and forcing reconcilation later. possibly with: * bob chose to reconcile this as A over B * jesse and clkao chose to reconcile this as B over C can upstream reject the changes? then we shouldn't call these replicas I don't think there is 'upstream' Resolution When the local Replica =head2 Assumptions about merge sources - a source represents a branch - a branch has had many revisions applied - revisions we apply to a branch from another branch are always applied UNCHANGED - in the event of a conflict, we will apply a "pre-changeset" to smooth the application and a "post-changeset" if the applied changeset doesn't match the desired outcome. on pull we end up applying every single source changeset this peer has seen since the last time we saw this peer, one by one we possibly apply another two changesets for each changeset to smooth the application we skip any source changeset we've seen from another source before on push, we publish each changeset we've made targetly. - including "after" fixup changesets - not including "before" fixup changesets. =head2 assumptions about merge changesets we can get (and hash): -original database uuid -original database revno -original database author -all changes Audrey@1: Createdb Add ticket 1 - subject 'foo'; status 'new' Jesse@1: j pull from a@1 j@1 - foo;new a@1 - foo;new j@2 foo->bar a@2 foo->baz c@1 pull from j@2 c now has: a@1 j@2 c@2 bar->frotz c@3 pull from a@2 a hands c: a@2:foo->baz Conflict to target c applies a pre-fixup: frotz->foo to target c applies a@2: foo->baz to target c applies a conflict resolution baz->frotz c@4 push to a@2 beforehand, a's state: baz beforehand, c's state: frotz beforehand, c's unpushed transactions: j@2 foo->bar c@2 bar->frotz c@3 pre-fixup: frotz->foo a@2: foo->baz post-fixup: baz->frotz so, what do we push? options: fixup to get a to our earliest state unpushed. that's kind of stupid and results in us replaying everthing all the time. compute a single large changeset from a@HEAD to c@HEAD and apply that? investigate "rumor" peer to peer replication take 2 there's a new peer I've not seen before. I need to merge in all their changes: if a changeset comes from a replica@rev lower than our last-seen version for replica, skip it for each of their changesets, import it, applying fixups as needed =head3 push get the source list of all merge tickets. find all target revs which source has never seen. - genuine target changesets - third-party changesets that the source has no merge ticket for - skip "before" fixup changesets - include "after" fixup changesets, since they're merge resolutions iterate over these revs the source has never seen. when the changeset applies cleanly, just apply it. when the changeset does _not_ apply cleanly, - apply a 'before' fix up transaction - apply the original changeset - apply a merge resolution changeset. - TODO: this doesn't feel quite right - the resolution should be the same as the equivalent merge resolution transaction on the "pull" variant if it exists. What info do we have here? 
- record uuid - nearest parent variant - target variant - source variant - information from the 'future (target head)' about the eventual desired outcome - audit stage: compare target head and source head. - they should now be identical, since the last transactions we replayed were all target merge fixup changesets. (is that true?) =cut 1; prophet-0.750/doc/repository-layout000066400000000000000000000004401160607302300174240ustar00rootroot00000000000000SVN REPO _prophet _merge_tickets remote-source-uuid last-changeset: 1234 (changeset sequence no) $record_type uuid-1234 this file's svn props are the record properties uuid-1235 uuid-1236 prophet-0.750/doc/tab-completion000066400000000000000000000005131160607302300166100ustar00rootroot00000000000000zsh: sudo ln -vis `pwd`/etc/prophet.zsh /usr/share/zsh/site-functions/_prophet (tested on Mac OS X 10.6) bash: symlink etc/prophet.bash to your bash_completion.d directory, if you're using macports this will be /opt/local/etc/bash_completion.d On a linux platform, this is likely to be /etc/bash_completion.d prophet-0.750/doc/todo000066400000000000000000000055511160607302300146470ustar00rootroot00000000000000Todo - document sd & replica format - native replica type isn't properly transactional. aborting at the wrong time will cause great sadness and possible corruption - make merge aware of database uuids - and does it mean if the db is initialized with a pull, it uses the same UUID - yes. in general, dbs should be initialized with pull or be new projects - merging between replicas with different uuids should require a 'force' argument of some kind. "publish my changes for remote pulling" - mostly done. needs test and cleanup, "publish" scp wrapper - move merge-ticket logic out of handle and only provides metadata storage - validation on bug tracker fields - severity - Replace this todo list with a svb database - elegant support for large attachments - RESTy web server API to let third-parties build non-perl apps against a Prophet Depot - define a value for a property that is a reference to: - another record - a set of records @done - sketch out RT scrips replacement Saturday done - implement a simple Prophet::Replica::Hiveminder for "personal tasks only" - extract the reusable bits of Prophet::Replica::RT to Prophet::ForeignReplica - implement uuids for prophet databases DONE - light dinner - dinner @done Todo after saturday: Archive: - ability to add comments to a bug (visible history entries) - maybe long-prop edits - when committing any change: - record the original depot uuid and change sequence_no as revprops - record two merge tickets: - sequence_no from the source that gave it to us - sequence_no from the original recording source - naive support for large attachments - ability to pull non-conflicting updates from a remote db - implement merge of conflicts with: "local always wins" - record conflict resolution data - reuse conflict resolution data on repeated resolve - ability to 'pull' conflicting updates from a remote db - prompt for resolution of conflicts - handle file_conflict - test byzantine sync behaviour - handle conflicting conflict resolutions - base bug tracking schema - ::CLI should automatically discover an app's model class based on the type name @done - Creation of bug tracking model classes @done - status @done - relations between models @done - find out what the remote _would_ pull by inspecting its merge tickets on @done - current replica @done - once we do that, we can find out who _we_ have synced from after that point, 
right? Then we want: @done - anyone we have a merge ticket for _since_ the last time the other party saw us. @done - nobu @done - get RT to give us a list of ticket records matching our query @done - get rt ro give us a list of history entries on those ticket records @done prophet-0.750/doc/web_form_handling000066400000000000000000000004631160607302300173430ustar00rootroot00000000000000# in the dispatcher: # get all form fields that match the spec # bundle them by record # order them by the desired order # canonicalize # validate # execute if we're to execute # on failure # rerender the current page # on success # go to "next page" prophet-0.750/etc/000077500000000000000000000000001160607302300137575ustar00rootroot00000000000000prophet-0.750/etc/prophet.bash000066400000000000000000000001661160607302300163020ustar00rootroot00000000000000function _prophet_() { COMPREPLY=($($1 _gencomp ${COMP_WORDS[COMP_CWORD]})) } complete -F _prophet_ myprophetapp prophet-0.750/etc/prophet.zsh000066400000000000000000000002131160607302300161620ustar00rootroot00000000000000#compdef prophet sd typeset -a prophet_completions prophet_completions=($($words[1] _gencomp $words[2,-1])) compadd $prophet_completions prophet-0.750/inc/000077500000000000000000000000001160607302300137555ustar00rootroot00000000000000prophet-0.750/inc/Module/000077500000000000000000000000001160607302300152025ustar00rootroot00000000000000prophet-0.750/inc/Module/AutoInstall.pm000066400000000000000000000542311160607302300200040ustar00rootroot00000000000000#line 1 package Module::AutoInstall; use strict; use Cwd (); use ExtUtils::MakeMaker (); use vars qw{$VERSION}; BEGIN { $VERSION = '1.03'; } # special map on pre-defined feature sets my %FeatureMap = ( '' => 'Core Features', # XXX: deprecated '-core' => 'Core Features', ); # various lexical flags my ( @Missing, @Existing, %DisabledTests, $UnderCPAN, $HasCPANPLUS ); my ( $Config, $CheckOnly, $SkipInstall, $AcceptDefault, $TestOnly, $AllDeps ); my ( $PostambleActions, $PostambleUsed ); # See if it's a testing or non-interactive session _accept_default( $ENV{AUTOMATED_TESTING} or ! -t STDIN ); _init(); sub _accept_default { $AcceptDefault = shift; } sub missing_modules { return @Missing; } sub do_install { __PACKAGE__->install( [ $Config ? ( UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} ) : () ], @Missing, ); } # initialize various flags, and/or perform install sub _init { foreach my $arg ( @ARGV, split( /[\s\t]+/, $ENV{PERL_AUTOINSTALL} || $ENV{PERL_EXTUTILS_AUTOINSTALL} || '' ) ) { if ( $arg =~ /^--config=(.*)$/ ) { $Config = [ split( ',', $1 ) ]; } elsif ( $arg =~ /^--installdeps=(.*)$/ ) { __PACKAGE__->install( $Config, @Missing = split( /,/, $1 ) ); exit 0; } elsif ( $arg =~ /^--default(?:deps)?$/ ) { $AcceptDefault = 1; } elsif ( $arg =~ /^--check(?:deps)?$/ ) { $CheckOnly = 1; } elsif ( $arg =~ /^--skip(?:deps)?$/ ) { $SkipInstall = 1; } elsif ( $arg =~ /^--test(?:only)?$/ ) { $TestOnly = 1; } elsif ( $arg =~ /^--all(?:deps)?$/ ) { $AllDeps = 1; } } } # overrides MakeMaker's prompt() to automatically accept the default choice sub _prompt { goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault; my ( $prompt, $default ) = @_; my $y = ( $default =~ /^[Yy]/ ); print $prompt, ' [', ( $y ? 'Y' : 'y' ), '/', ( $y ? 'n' : 'N' ), '] '; print "$default\n"; return $default; } # the workhorse sub import { my $class = shift; my @args = @_ or return; my $core_all; print "*** $class version " . $class->VERSION . 
"\n"; print "*** Checking for Perl dependencies...\n"; my $cwd = Cwd::cwd(); $Config = []; my $maxlen = length( ( sort { length($b) <=> length($a) } grep { /^[^\-]/ } map { ref($_) ? ( ( ref($_) eq 'HASH' ) ? keys(%$_) : @{$_} ) : '' } map { +{@args}->{$_} } grep { /^[^\-]/ or /^-core$/i } keys %{ +{@args} } )[0] ); # We want to know if we're under CPAN early to avoid prompting, but # if we aren't going to try and install anything anyway then skip the # check entirely since we don't want to have to load (and configure) # an old CPAN just for a cosmetic message $UnderCPAN = _check_lock(1) unless $SkipInstall; while ( my ( $feature, $modules ) = splice( @args, 0, 2 ) ) { my ( @required, @tests, @skiptests ); my $default = 1; my $conflict = 0; if ( $feature =~ m/^-(\w+)$/ ) { my $option = lc($1); # check for a newer version of myself _update_to( $modules, @_ ) and return if $option eq 'version'; # sets CPAN configuration options $Config = $modules if $option eq 'config'; # promote every features to core status $core_all = ( $modules =~ /^all$/i ) and next if $option eq 'core'; next unless $option eq 'core'; } print "[" . ( $FeatureMap{ lc($feature) } || $feature ) . "]\n"; $modules = [ %{$modules} ] if UNIVERSAL::isa( $modules, 'HASH' ); unshift @$modules, -default => &{ shift(@$modules) } if ( ref( $modules->[0] ) eq 'CODE' ); # XXX: bugward combatability while ( my ( $mod, $arg ) = splice( @$modules, 0, 2 ) ) { if ( $mod =~ m/^-(\w+)$/ ) { my $option = lc($1); $default = $arg if ( $option eq 'default' ); $conflict = $arg if ( $option eq 'conflict' ); @tests = @{$arg} if ( $option eq 'tests' ); @skiptests = @{$arg} if ( $option eq 'skiptests' ); next; } printf( "- %-${maxlen}s ...", $mod ); if ( $arg and $arg =~ /^\D/ ) { unshift @$modules, $arg; $arg = 0; } # XXX: check for conflicts and uninstalls(!) them. my $cur = _load($mod); if (_version_cmp ($cur, $arg) >= 0) { print "loaded. ($cur" . ( $arg ? " >= $arg" : '' ) . ")\n"; push @Existing, $mod => $arg; $DisabledTests{$_} = 1 for map { glob($_) } @skiptests; } else { if (not defined $cur) # indeed missing { print "missing." . ( $arg ? " (would need $arg)" : '' ) . "\n"; } else { # no need to check $arg as _version_cmp ($cur, undef) would satisfy >= above print "too old. ($cur < $arg)\n"; } push @required, $mod => $arg; } } next unless @required; my $mandatory = ( $feature eq '-core' or $core_all ); if ( !$SkipInstall and ( $CheckOnly or ($mandatory and $UnderCPAN) or $AllDeps or _prompt( qq{==> Auto-install the } . ( @required / 2 ) . ( $mandatory ? ' mandatory' : ' optional' ) . qq{ module(s) from CPAN?}, $default ? 'y' : 'n', ) =~ /^[Yy]/ ) ) { push( @Missing, @required ); $DisabledTests{$_} = 1 for map { glob($_) } @skiptests; } elsif ( !$SkipInstall and $default and $mandatory and _prompt( qq{==> The module(s) are mandatory! Really skip?}, 'n', ) =~ /^[Nn]/ ) { push( @Missing, @required ); $DisabledTests{$_} = 1 for map { glob($_) } @skiptests; } else { $DisabledTests{$_} = 1 for map { glob($_) } @tests; } } if ( @Missing and not( $CheckOnly or $UnderCPAN ) ) { require Config; print "*** Dependencies will be installed the next time you type '$Config::Config{make}'.\n"; # make an educated guess of whether we'll need root permission. 
print " (You may need to do that as the 'root' user.)\n" if eval '$>'; } print "*** $class configuration finished.\n"; chdir $cwd; # import to main:: no strict 'refs'; *{'main::WriteMakefile'} = \&Write if caller(0) eq 'main'; return (@Existing, @Missing); } sub _running_under { my $thing = shift; print <<"END_MESSAGE"; *** Since we're running under ${thing}, I'll just let it take care of the dependency's installation later. END_MESSAGE return 1; } # Check to see if we are currently running under CPAN.pm and/or CPANPLUS; # if we are, then we simply let it taking care of our dependencies sub _check_lock { return unless @Missing or @_; my $cpan_env = $ENV{PERL5_CPAN_IS_RUNNING}; if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) { return _running_under($cpan_env ? 'CPAN' : 'CPANPLUS'); } require CPAN; if ($CPAN::VERSION > '1.89') { if ($cpan_env) { return _running_under('CPAN'); } return; # CPAN.pm new enough, don't need to check further } # last ditch attempt, this -will- configure CPAN, very sorry _load_cpan(1); # force initialize even though it's already loaded # Find the CPAN lock-file my $lock = MM->catfile( $CPAN::Config->{cpan_home}, ".lock" ); return unless -f $lock; # Check the lock local *LOCK; return unless open(LOCK, $lock); if ( ( $^O eq 'MSWin32' ? _under_cpan() : == getppid() ) and ( $CPAN::Config->{prerequisites_policy} || '' ) ne 'ignore' ) { print <<'END_MESSAGE'; *** Since we're running under CPAN, I'll just let it take care of the dependency's installation later. END_MESSAGE return 1; } close LOCK; return; } sub install { my $class = shift; my $i; # used below to strip leading '-' from config keys my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } ); my ( @modules, @installed ); while ( my ( $pkg, $ver ) = splice( @_, 0, 2 ) ) { # grep out those already installed if ( _version_cmp( _load($pkg), $ver ) >= 0 ) { push @installed, $pkg; } else { push @modules, $pkg, $ver; } } return @installed unless @modules; # nothing to do return @installed if _check_lock(); # defer to the CPAN shell print "*** Installing dependencies...\n"; return unless _connected_to('cpan.org'); my %args = @config; my %failed; local *FAILED; if ( $args{do_once} and open( FAILED, '.#autoinstall.failed' ) ) { while () { chomp; $failed{$_}++ } close FAILED; my @newmod; while ( my ( $k, $v ) = splice( @modules, 0, 2 ) ) { push @newmod, ( $k => $v ) unless $failed{$k}; } @modules = @newmod; } if ( _has_cpanplus() and not $ENV{PERL_AUTOINSTALL_PREFER_CPAN} ) { _install_cpanplus( \@modules, \@config ); } else { _install_cpan( \@modules, \@config ); } print "*** $class installation finished.\n"; # see if we have successfully installed them while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) { if ( _version_cmp( _load($pkg), $ver ) >= 0 ) { push @installed, $pkg; } elsif ( $args{do_once} and open( FAILED, '>> .#autoinstall.failed' ) ) { print FAILED "$pkg\n"; } } close FAILED if $args{do_once}; return @installed; } sub _install_cpanplus { my @modules = @{ +shift }; my @config = _cpanplus_config( @{ +shift } ); my $installed = 0; require CPANPLUS::Backend; my $cp = CPANPLUS::Backend->new; my $conf = $cp->configure_object; return unless $conf->can('conf') # 0.05x+ with "sudo" support or _can_write($conf->_get_build('base')); # 0.04x # if we're root, set UNINST=1 to avoid trouble unless user asked for it. 
my $makeflags = $conf->get_conf('makeflags') || ''; if ( UNIVERSAL::isa( $makeflags, 'HASH' ) ) { # 0.03+ uses a hashref here $makeflags->{UNINST} = 1 unless exists $makeflags->{UNINST}; } else { # 0.02 and below uses a scalar $makeflags = join( ' ', split( ' ', $makeflags ), 'UNINST=1' ) if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } ); } $conf->set_conf( makeflags => $makeflags ); $conf->set_conf( prereqs => 1 ); while ( my ( $key, $val ) = splice( @config, 0, 2 ) ) { $conf->set_conf( $key, $val ); } my $modtree = $cp->module_tree; while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) { print "*** Installing $pkg...\n"; MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall; my $success; my $obj = $modtree->{$pkg}; if ( $obj and _version_cmp( $obj->{version}, $ver ) >= 0 ) { my $pathname = $pkg; $pathname =~ s/::/\\W/; foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) { delete $INC{$inc}; } my $rv = $cp->install( modules => [ $obj->{module} ] ); if ( $rv and ( $rv->{ $obj->{module} } or $rv->{ok} ) ) { print "*** $pkg successfully installed.\n"; $success = 1; } else { print "*** $pkg installation cancelled.\n"; $success = 0; } $installed += $success; } else { print << "."; *** Could not find a version $ver or above for $pkg; skipping. . } MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall; } return $installed; } sub _cpanplus_config { my @config = (); while ( @_ ) { my ($key, $value) = (shift(), shift()); if ( $key eq 'prerequisites_policy' ) { if ( $value eq 'follow' ) { $value = CPANPLUS::Internals::Constants::PREREQ_INSTALL(); } elsif ( $value eq 'ask' ) { $value = CPANPLUS::Internals::Constants::PREREQ_ASK(); } elsif ( $value eq 'ignore' ) { $value = CPANPLUS::Internals::Constants::PREREQ_IGNORE(); } else { die "*** Cannot convert option $key = '$value' to CPANPLUS version.\n"; } } else { die "*** Cannot convert option $key to CPANPLUS version.\n"; } } return @config; } sub _install_cpan { my @modules = @{ +shift }; my @config = @{ +shift }; my $installed = 0; my %args; _load_cpan(); require Config; if (CPAN->VERSION < 1.80) { # no "sudo" support, probe for writableness return unless _can_write( MM->catfile( $CPAN::Config->{cpan_home}, 'sources' ) ) and _can_write( $Config::Config{sitelib} ); } # if we're root, set UNINST=1 to avoid trouble unless user asked for it. my $makeflags = $CPAN::Config->{make_install_arg} || ''; $CPAN::Config->{make_install_arg} = join( ' ', split( ' ', $makeflags ), 'UNINST=1' ) if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } ); # don't show start-up info $CPAN::Config->{inhibit_startup_message} = 1; # set additional options while ( my ( $opt, $arg ) = splice( @config, 0, 2 ) ) { ( $args{$opt} = $arg, next ) if $opt =~ /^force$/; # pseudo-option $CPAN::Config->{$opt} = $arg; } local $CPAN::Config->{prerequisites_policy} = 'follow'; while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) { MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall; print "*** Installing $pkg...\n"; my $obj = CPAN::Shell->expand( Module => $pkg ); my $success = 0; if ( $obj and _version_cmp( $obj->cpan_version, $ver ) >= 0 ) { my $pathname = $pkg; $pathname =~ s/::/\\W/; foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) { delete $INC{$inc}; } my $rv = $args{force} ? 
CPAN::Shell->force( install => $pkg ) : CPAN::Shell->install($pkg); $rv ||= eval { $CPAN::META->instance( 'CPAN::Distribution', $obj->cpan_file, ) ->{install} if $CPAN::META; }; if ( $rv eq 'YES' ) { print "*** $pkg successfully installed.\n"; $success = 1; } else { print "*** $pkg installation failed.\n"; $success = 0; } $installed += $success; } else { print << "."; *** Could not find a version $ver or above for $pkg; skipping. . } MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall; } return $installed; } sub _has_cpanplus { return ( $HasCPANPLUS = ( $INC{'CPANPLUS/Config.pm'} or _load('CPANPLUS::Shell::Default') ) ); } # make guesses on whether we're under the CPAN installation directory sub _under_cpan { require Cwd; require File::Spec; my $cwd = File::Spec->canonpath( Cwd::cwd() ); my $cpan = File::Spec->canonpath( $CPAN::Config->{cpan_home} ); return ( index( $cwd, $cpan ) > -1 ); } sub _update_to { my $class = __PACKAGE__; my $ver = shift; return if _version_cmp( _load($class), $ver ) >= 0; # no need to upgrade if ( _prompt( "==> A newer version of $class ($ver) is required. Install?", 'y' ) =~ /^[Nn]/ ) { die "*** Please install $class $ver manually.\n"; } print << "."; *** Trying to fetch it from CPAN... . # install ourselves _load($class) and return $class->import(@_) if $class->install( [], $class, $ver ); print << '.'; exit 1; *** Cannot bootstrap myself. :-( Installation terminated. . } # check if we're connected to some host, using inet_aton sub _connected_to { my $site = shift; return ( ( _load('Socket') and Socket::inet_aton($site) ) or _prompt( qq( *** Your host cannot resolve the domain name '$site', which probably means the Internet connections are unavailable. ==> Should we try to install the required module(s) anyway?), 'n' ) =~ /^[Yy]/ ); } # check if a directory is writable; may create it on demand sub _can_write { my $path = shift; mkdir( $path, 0755 ) unless -e $path; return 1 if -w $path; print << "."; *** You are not allowed to write to the directory '$path'; the installation may fail due to insufficient permissions. . if ( eval '$>' and lc(`sudo -V`) =~ /version/ and _prompt( qq( ==> Should we try to re-execute the autoinstall process with 'sudo'?), ((-t STDIN) ? 'y' : 'n') ) =~ /^[Yy]/ ) { # try to bootstrap ourselves from sudo print << "."; *** Trying to re-execute the autoinstall process with 'sudo'... . my $missing = join( ',', @Missing ); my $config = join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} ) if $Config; return unless system( 'sudo', $^X, $0, "--config=$config", "--installdeps=$missing" ); print << "."; *** The 'sudo' command exited with error! Resuming... . } return _prompt( qq( ==> Should we try to install the required module(s) anyway?), 'n' ) =~ /^[Yy]/; } # load a module and return the version it reports sub _load { my $mod = pop; # class/instance doesn't matter my $file = $mod; $file =~ s|::|/|g; $file .= '.pm'; local $@; return eval { require $file; $mod->VERSION } || ( $@ ? undef: 0 ); } # Load CPAN.pm and it's configuration sub _load_cpan { return if $CPAN::VERSION and $CPAN::Config and not @_; require CPAN; # CPAN-1.82+ adds CPAN::Config::AUTOLOAD to redirect to # CPAN::HandleConfig->load. CPAN reports that the redirection # is deprecated in a warning printed at the user. # CPAN-1.81 expects CPAN::HandleConfig->load, does not have # $CPAN::HandleConfig::VERSION but cannot handle # CPAN::Config->load # Which "versions expect CPAN::Config->load? 
if ( $CPAN::HandleConfig::VERSION || CPAN::HandleConfig->can('load') ) { # Newer versions of CPAN have a HandleConfig module CPAN::HandleConfig->load; } else { # Older versions had the load method in Config directly CPAN::Config->load; } } # compare two versions, either use Sort::Versions or plain comparison # return values same as <=> sub _version_cmp { my ( $cur, $min ) = @_; return -1 unless defined $cur; # if 0 keep comparing return 1 unless $min; $cur =~ s/\s+$//; # check for version numbers that are not in decimal format if ( ref($cur) or ref($min) or $cur =~ /v|\..*\./ or $min =~ /v|\..*\./ ) { if ( ( $version::VERSION or defined( _load('version') )) and version->can('new') ) { # use version.pm if it is installed. return version->new($cur) <=> version->new($min); } elsif ( $Sort::Versions::VERSION or defined( _load('Sort::Versions') ) ) { # use Sort::Versions as the sorting algorithm for a.b.c versions return Sort::Versions::versioncmp( $cur, $min ); } warn "Cannot reliably compare non-decimal formatted versions.\n" . "Please install version.pm or Sort::Versions.\n"; } # plain comparison local $^W = 0; # shuts off 'not numeric' bugs return $cur <=> $min; } # nothing; this usage is deprecated. sub main::PREREQ_PM { return {}; } sub _make_args { my %args = @_; $args{PREREQ_PM} = { %{ $args{PREREQ_PM} || {} }, @Existing, @Missing } if $UnderCPAN or $TestOnly; if ( $args{EXE_FILES} and -e 'MANIFEST' ) { require ExtUtils::Manifest; my $manifest = ExtUtils::Manifest::maniread('MANIFEST'); $args{EXE_FILES} = [ grep { exists $manifest->{$_} } @{ $args{EXE_FILES} } ]; } $args{test}{TESTS} ||= 't/*.t'; $args{test}{TESTS} = join( ' ', grep { !exists( $DisabledTests{$_} ) } map { glob($_) } split( /\s+/, $args{test}{TESTS} ) ); my $missing = join( ',', @Missing ); my $config = join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} ) if $Config; $PostambleActions = ( ($missing and not $UnderCPAN) ? "\$(PERL) $0 --config=$config --installdeps=$missing" : "\$(NOECHO) \$(NOOP)" ); return %args; } # a wrapper to ExtUtils::MakeMaker::WriteMakefile sub Write { require Carp; Carp::croak "WriteMakefile: Need even number of args" if @_ % 2; if ($CheckOnly) { print << "."; *** Makefile not written in check-only mode. . return; } my %args = _make_args(@_); no strict 'refs'; $PostambleUsed = 0; local *MY::postamble = \&postamble unless defined &MY::postamble; ExtUtils::MakeMaker::WriteMakefile(%args); print << "." unless $PostambleUsed; *** WARNING: Makefile written with customized MY::postamble() without including contents from Module::AutoInstall::postamble() -- auto installation features disabled. Please contact the author. . return 1; } sub postamble { $PostambleUsed = 1; return <<"END_MAKE"; config :: installdeps \t\$(NOECHO) \$(NOOP) checkdeps :: \t\$(PERL) $0 --checkdeps installdeps :: \t$PostambleActions END_MAKE } 1; __END__ #line 1071 prophet-0.750/inc/Module/Install.pm000066400000000000000000000301351160607302300171500ustar00rootroot00000000000000#line 1 package Module::Install; # For any maintainers: # The load order for Module::Install is a bit magic. # It goes something like this... # # IF ( host has Module::Install installed, creating author mode ) { # 1. Makefile.PL calls "use inc::Module::Install" # 2. $INC{inc/Module/Install.pm} set to installed version of inc::Module::Install # 3. The installed version of inc::Module::Install loads # 4. inc::Module::Install calls "require Module::Install" # 5. The ./inc/ version of Module::Install loads # } ELSE { # 1. 
Makefile.PL calls "use inc::Module::Install" # 2. $INC{inc/Module/Install.pm} set to ./inc/ version of Module::Install # 3. The ./inc/ version of Module::Install loads # } use 5.005; use strict 'vars'; use Cwd (); use File::Find (); use File::Path (); use vars qw{$VERSION $MAIN}; BEGIN { # All Module::Install core packages now require synchronised versions. # This will be used to ensure we don't accidentally load old or # different versions of modules. # This is not enforced yet, but will be some time in the next few # releases once we can make sure it won't clash with custom # Module::Install extensions. $VERSION = '1.00'; # Storage for the pseudo-singleton $MAIN = undef; *inc::Module::Install::VERSION = *VERSION; @inc::Module::Install::ISA = __PACKAGE__; } sub import { my $class = shift; my $self = $class->new(@_); my $who = $self->_caller; #------------------------------------------------------------- # all of the following checks should be included in import(), # to allow "eval 'require Module::Install; 1' to test # installation of Module::Install. (RT #51267) #------------------------------------------------------------- # Whether or not inc::Module::Install is actually loaded, the # $INC{inc/Module/Install.pm} is what will still get set as long as # the caller loaded module this in the documented manner. # If not set, the caller may NOT have loaded the bundled version, and thus # they may not have a MI version that works with the Makefile.PL. This would # result in false errors or unexpected behaviour. And we don't want that. my $file = join( '/', 'inc', split /::/, __PACKAGE__ ) . '.pm'; unless ( $INC{$file} ) { die <<"END_DIE" } Please invoke ${\__PACKAGE__} with: use inc::${\__PACKAGE__}; not: use ${\__PACKAGE__}; END_DIE # This reportedly fixes a rare Win32 UTC file time issue, but # as this is a non-cross-platform XS module not in the core, # we shouldn't really depend on it. See RT #24194 for detail. # (Also, this module only supports Perl 5.6 and above). eval "use Win32::UTCFileTime" if $^O eq 'MSWin32' && $] >= 5.006; # If the script that is loading Module::Install is from the future, # then make will detect this and cause it to re-run over and over # again. This is bad. Rather than taking action to touch it (which # is unreliable on some platforms and requires write permissions) # for now we should catch this and refuse to run. if ( -f $0 ) { my $s = (stat($0))[9]; # If the modification time is only slightly in the future, # sleep briefly to remove the problem. my $a = $s - time; if ( $a > 0 and $a < 5 ) { sleep 5 } # Too far in the future, throw an error. my $t = time; if ( $s > $t ) { die <<"END_DIE" } Your installer $0 has a modification time in the future ($s > $t). This is known to create infinite loops in make. Please correct this, then run $0 again. END_DIE } # Build.PL was formerly supported, but no longer is due to excessive # difficulty in implementing every single feature twice. if ( $0 =~ /Build.PL$/i ) { die <<"END_DIE" } Module::Install no longer supports Build.PL. It was impossible to maintain duel backends, and has been deprecated. Please remove all Build.PL files and only use the Makefile.PL installer. END_DIE #------------------------------------------------------------- # To save some more typing in Module::Install installers, every... # use inc::Module::Install # ...also acts as an implicit use strict. 
$^H |= strict::bits(qw(refs subs vars)); #------------------------------------------------------------- unless ( -f $self->{file} ) { foreach my $key (keys %INC) { delete $INC{$key} if $key =~ /Module\/Install/; } local $^W; require "$self->{path}/$self->{dispatch}.pm"; File::Path::mkpath("$self->{prefix}/$self->{author}"); $self->{admin} = "$self->{name}::$self->{dispatch}"->new( _top => $self ); $self->{admin}->init; @_ = ($class, _self => $self); goto &{"$self->{name}::import"}; } local $^W; *{"${who}::AUTOLOAD"} = $self->autoload; $self->preload; # Unregister loader and worker packages so subdirs can use them again delete $INC{'inc/Module/Install.pm'}; delete $INC{'Module/Install.pm'}; # Save to the singleton $MAIN = $self; return 1; } sub autoload { my $self = shift; my $who = $self->_caller; my $cwd = Cwd::cwd(); my $sym = "${who}::AUTOLOAD"; $sym->{$cwd} = sub { my $pwd = Cwd::cwd(); if ( my $code = $sym->{$pwd} ) { # Delegate back to parent dirs goto &$code unless $cwd eq $pwd; } unless ($$sym =~ s/([^:]+)$//) { # XXX: it looks like we can't retrieve the missing function # via $$sym (usually $main::AUTOLOAD) in this case. # I'm still wondering if we should slurp Makefile.PL to # get some context or not ... my ($package, $file, $line) = caller; die <<"EOT"; Unknown function is found at $file line $line. Execution of $file aborted due to runtime errors. If you're a contributor to a project, you may need to install some Module::Install extensions from CPAN (or other repository). If you're a user of a module, please contact the author. EOT } my $method = $1; if ( uc($method) eq $method ) { # Do nothing return; } elsif ( $method =~ /^_/ and $self->can($method) ) { # Dispatch to the root M:I class return $self->$method(@_); } # Dispatch to the appropriate plugin unshift @_, ( $self, $1 ); goto &{$self->can('call')}; }; } sub preload { my $self = shift; unless ( $self->{extensions} ) { $self->load_extensions( "$self->{prefix}/$self->{path}", $self ); } my @exts = @{$self->{extensions}}; unless ( @exts ) { @exts = $self->{admin}->load_all_extensions; } my %seen; foreach my $obj ( @exts ) { while (my ($method, $glob) = each %{ref($obj) . '::'}) { next unless $obj->can($method); next if $method =~ /^_/; next if $method eq uc($method); $seen{$method}++; } } my $who = $self->_caller; foreach my $name ( sort keys %seen ) { local $^W; *{"${who}::$name"} = sub { ${"${who}::AUTOLOAD"} = "${who}::$name"; goto &{"${who}::AUTOLOAD"}; }; } } sub new { my ($class, %args) = @_; delete $INC{'FindBin.pm'}; { # to suppress the redefine warning local $SIG{__WARN__} = sub {}; require FindBin; } # ignore the prefix on extension modules built from top level. my $base_path = Cwd::abs_path($FindBin::Bin); unless ( Cwd::abs_path(Cwd::cwd()) eq $base_path ) { delete $args{prefix}; } return $args{_self} if $args{_self}; $args{dispatch} ||= 'Admin'; $args{prefix} ||= 'inc'; $args{author} ||= ($^O eq 'VMS' ? 
'_author' : '.author'); $args{bundle} ||= 'inc/BUNDLES'; $args{base} ||= $base_path; $class =~ s/^\Q$args{prefix}\E:://; $args{name} ||= $class; $args{version} ||= $class->VERSION; unless ( $args{path} ) { $args{path} = $args{name}; $args{path} =~ s!::!/!g; } $args{file} ||= "$args{base}/$args{prefix}/$args{path}.pm"; $args{wrote} = 0; bless( \%args, $class ); } sub call { my ($self, $method) = @_; my $obj = $self->load($method) or return; splice(@_, 0, 2, $obj); goto &{$obj->can($method)}; } sub load { my ($self, $method) = @_; $self->load_extensions( "$self->{prefix}/$self->{path}", $self ) unless $self->{extensions}; foreach my $obj (@{$self->{extensions}}) { return $obj if $obj->can($method); } my $admin = $self->{admin} or die <<"END_DIE"; The '$method' method does not exist in the '$self->{prefix}' path! Please remove the '$self->{prefix}' directory and run $0 again to load it. END_DIE my $obj = $admin->load($method, 1); push @{$self->{extensions}}, $obj; $obj; } sub load_extensions { my ($self, $path, $top) = @_; my $should_reload = 0; unless ( grep { ! ref $_ and lc $_ eq lc $self->{prefix} } @INC ) { unshift @INC, $self->{prefix}; $should_reload = 1; } foreach my $rv ( $self->find_extensions($path) ) { my ($file, $pkg) = @{$rv}; next if $self->{pathnames}{$pkg}; local $@; my $new = eval { local $^W; require $file; $pkg->can('new') }; unless ( $new ) { warn $@ if $@; next; } $self->{pathnames}{$pkg} = $should_reload ? delete $INC{$file} : $INC{$file}; push @{$self->{extensions}}, &{$new}($pkg, _top => $top ); } $self->{extensions} ||= []; } sub find_extensions { my ($self, $path) = @_; my @found; File::Find::find( sub { my $file = $File::Find::name; return unless $file =~ m!^\Q$path\E/(.+)\.pm\Z!is; my $subpath = $1; return if lc($subpath) eq lc($self->{dispatch}); $file = "$self->{path}/$subpath.pm"; my $pkg = "$self->{name}::$subpath"; $pkg =~ s!/!::!g; # If we have a mixed-case package name, assume case has been preserved # correctly. Otherwise, root through the file to locate the case-preserved # version of the package name. if ( $subpath eq lc($subpath) || $subpath eq uc($subpath) ) { my $content = Module::Install::_read($subpath . '.pm'); my $in_pod = 0; foreach ( split //, $content ) { $in_pod = 1 if /^=\w/; $in_pod = 0 if /^=cut/; next if ($in_pod || /^=cut/); # skip pod text next if /^\s*#/; # and comments if ( m/^\s*package\s+($pkg)\s*;/i ) { $pkg = $1; last; } } } push @found, [ $file, $pkg ]; }, $path ) if -d $path; @found; } ##################################################################### # Common Utility Functions sub _caller { my $depth = 0; my $call = caller($depth); while ( $call eq __PACKAGE__ ) { $depth++; $call = caller($depth); } return $call; } # Done in evals to avoid confusing Perl::MinimumVersion eval( $] >= 5.006 ? 
<<'END_NEW' : <<'END_OLD' ); die $@ if $@; sub _read { local *FH; open( FH, '<', $_[0] ) or die "open($_[0]): $!"; my $string = do { local $/; }; close FH or die "close($_[0]): $!"; return $string; } END_NEW sub _read { local *FH; open( FH, "< $_[0]" ) or die "open($_[0]): $!"; my $string = do { local $/; }; close FH or die "close($_[0]): $!"; return $string; } END_OLD sub _readperl { my $string = Module::Install::_read($_[0]); $string =~ s/(?:\015{1,2}\012|\015|\012)/\n/sg; $string =~ s/(\n)\n*__(?:DATA|END)__\b.*\z/$1/s; $string =~ s/\n\n=\w+.+?\n\n=cut\b.+?\n+/\n\n/sg; return $string; } sub _readpod { my $string = Module::Install::_read($_[0]); $string =~ s/(?:\015{1,2}\012|\015|\012)/\n/sg; return $string if $_[0] =~ /\.pod\z/; $string =~ s/(^|\n=cut\b.+?\n+)[^=\s].+?\n(\n=\w+|\z)/$1$2/sg; $string =~ s/\n*=pod\b[^\n]*\n+/\n\n/sg; $string =~ s/\n*=cut\b[^\n]*\n+/\n\n/sg; $string =~ s/^\n+//s; return $string; } # Done in evals to avoid confusing Perl::MinimumVersion eval( $] >= 5.006 ? <<'END_NEW' : <<'END_OLD' ); die $@ if $@; sub _write { local *FH; open( FH, '>', $_[0] ) or die "open($_[0]): $!"; foreach ( 1 .. $#_ ) { print FH $_[$_] or die "print($_[0]): $!"; } close FH or die "close($_[0]): $!"; } END_NEW sub _write { local *FH; open( FH, "> $_[0]" ) or die "open($_[0]): $!"; foreach ( 1 .. $#_ ) { print FH $_[$_] or die "print($_[0]): $!"; } close FH or die "close($_[0]): $!"; } END_OLD # _version is for processing module versions (eg, 1.03_05) not # Perl versions (eg, 5.8.1). sub _version ($) { my $s = shift || 0; my $d =()= $s =~ /(\.)/g; if ( $d >= 2 ) { # Normalise multipart versions $s =~ s/(\.)(\d{1,3})/sprintf("$1%03d",$2)/eg; } $s =~ s/^(\d+)\.?//; my $l = $1 || 0; my @v = map { $_ . '0' x (3 - length $_) } $s =~ /(\d{1,3})\D?/g; $l = $l . '.' . join '', @v if @v; return $l + 0; } sub _cmp ($$) { _version($_[0]) <=> _version($_[1]); } # Cloned from Params::Util::_CLASS sub _CLASS ($) { ( defined $_[0] and ! ref $_[0] and $_[0] =~ m/^[^\W\d]\w*(?:::\w+)*\z/s ) ? $_[0] : undef; } 1; # Copyright 2008 - 2010 Adam Kennedy. prophet-0.750/inc/Module/Install/000077500000000000000000000000001160607302300166105ustar00rootroot00000000000000prophet-0.750/inc/Module/Install/AutoInstall.pm000066400000000000000000000036321160607302300214110ustar00rootroot00000000000000#line 1 package Module::Install::AutoInstall; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub AutoInstall { $_[0] } sub run { my $self = shift; $self->auto_install_now(@_); } sub write { my $self = shift; $self->auto_install(@_); } sub auto_install { my $self = shift; return if $self->{done}++; # Flatten array of arrays into a single array my @core = map @$_, map @$_, grep ref, $self->build_requires, $self->requires; my @config = @_; # We'll need Module::AutoInstall $self->include('Module::AutoInstall'); require Module::AutoInstall; my @features_require = Module::AutoInstall->import( (@config ? (-config => \@config) : ()), (@core ? 
(-core => \@core) : ()), $self->features, ); my %seen; my @requires = map @$_, map @$_, grep ref, $self->requires; while (my ($mod, $ver) = splice(@requires, 0, 2)) { $seen{$mod}{$ver}++; } my @build_requires = map @$_, map @$_, grep ref, $self->build_requires; while (my ($mod, $ver) = splice(@build_requires, 0, 2)) { $seen{$mod}{$ver}++; } my @configure_requires = map @$_, map @$_, grep ref, $self->configure_requires; while (my ($mod, $ver) = splice(@configure_requires, 0, 2)) { $seen{$mod}{$ver}++; } my @deduped; while (my ($mod, $ver) = splice(@features_require, 0, 2)) { push @deduped, $mod => $ver unless $seen{$mod}{$ver}++; } $self->requires(@deduped); $self->makemaker_args( Module::AutoInstall::_make_args() ); my $class = ref($self); $self->postamble( "# --- $class section:\n" . Module::AutoInstall::postamble() ); } sub auto_install_now { my $self = shift; $self->auto_install(@_); Module::AutoInstall::do_install(); } 1; prophet-0.750/inc/Module/Install/Base.pm000066400000000000000000000021471160607302300200240ustar00rootroot00000000000000#line 1 package Module::Install::Base; use strict 'vars'; use vars qw{$VERSION}; BEGIN { $VERSION = '1.00'; } # Suspend handler for "redefined" warnings BEGIN { my $w = $SIG{__WARN__}; $SIG{__WARN__} = sub { $w }; } #line 42 sub new { my $class = shift; unless ( defined &{"${class}::call"} ) { *{"${class}::call"} = sub { shift->_top->call(@_) }; } unless ( defined &{"${class}::load"} ) { *{"${class}::load"} = sub { shift->_top->load(@_) }; } bless { @_ }, $class; } #line 61 sub AUTOLOAD { local $@; my $func = eval { shift->_top->autoload } or return; goto &$func; } #line 75 sub _top { $_[0]->{_top}; } #line 90 sub admin { $_[0]->_top->{admin} or Module::Install::Base::FakeAdmin->new; } #line 106 sub is_admin { ! 
$_[0]->admin->isa('Module::Install::Base::FakeAdmin'); } sub DESTROY {} package Module::Install::Base::FakeAdmin; use vars qw{$VERSION}; BEGIN { $VERSION = $Module::Install::Base::VERSION; } my $fake; sub new { $fake ||= bless(\@_, $_[0]); } sub AUTOLOAD {} sub DESTROY {} # Restore warning handler BEGIN { $SIG{__WARN__} = $SIG{__WARN__}->(); } 1; #line 159 prophet-0.750/inc/Module/Install/Can.pm000066400000000000000000000033331160607302300176510ustar00rootroot00000000000000#line 1 package Module::Install::Can; use strict; use Config (); use File::Spec (); use ExtUtils::MakeMaker (); use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } # check if we can load some module ### Upgrade this to not have to load the module if possible sub can_use { my ($self, $mod, $ver) = @_; $mod =~ s{::|\\}{/}g; $mod .= '.pm' unless $mod =~ /\.pm$/i; my $pkg = $mod; $pkg =~ s{/}{::}g; $pkg =~ s{\.pm$}{}i; local $@; eval { require $mod; $pkg->VERSION($ver || 0); 1 }; } # check if we can run some command sub can_run { my ($self, $cmd) = @_; my $_cmd = $cmd; return $_cmd if (-x $_cmd or $_cmd = MM->maybe_command($_cmd)); for my $dir ((split /$Config::Config{path_sep}/, $ENV{PATH}), '.') { next if $dir eq ''; my $abs = File::Spec->catfile($dir, $_[1]); return $abs if (-x $abs or $abs = MM->maybe_command($abs)); } return; } # can we locate a (the) C compiler sub can_cc { my $self = shift; my @chunks = split(/ /, $Config::Config{cc}) or return; # $Config{cc} may contain args; try to find out the program part while (@chunks) { return $self->can_run("@chunks") || (pop(@chunks), next); } return; } # Fix Cygwin bug on maybe_command(); if ( $^O eq 'cygwin' ) { require ExtUtils::MM_Cygwin; require ExtUtils::MM_Win32; if ( ! defined(&ExtUtils::MM_Cygwin::maybe_command) ) { *ExtUtils::MM_Cygwin::maybe_command = sub { my ($self, $file) = @_; if ($file =~ m{^/cygdrive/}i and ExtUtils::MM_Win32->can('maybe_command')) { ExtUtils::MM_Win32->maybe_command($file); } else { ExtUtils::MM_Unix->maybe_command($file); } } } } 1; __END__ #line 156 prophet-0.750/inc/Module/Install/Fetch.pm000066400000000000000000000046271160607302300202100ustar00rootroot00000000000000#line 1 package Module::Install::Fetch; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub get_file { my ($self, %args) = @_; my ($scheme, $host, $path, $file) = $args{url} =~ m|^(\w+)://([^/]+)(.+)/(.+)| or return; if ( $scheme eq 'http' and ! eval { require LWP::Simple; 1 } ) { $args{url} = $args{ftp_url} or (warn("LWP support unavailable!\n"), return); ($scheme, $host, $path, $file) = $args{url} =~ m|^(\w+)://([^/]+)(.+)/(.+)| or return; } $|++; print "Fetching '$file' from $host... 
"; unless (eval { require Socket; Socket::inet_aton($host) }) { warn "'$host' resolve failed!\n"; return; } return unless $scheme eq 'ftp' or $scheme eq 'http'; require Cwd; my $dir = Cwd::getcwd(); chdir $args{local_dir} or return if exists $args{local_dir}; if (eval { require LWP::Simple; 1 }) { LWP::Simple::mirror($args{url}, $file); } elsif (eval { require Net::FTP; 1 }) { eval { # use Net::FTP to get past firewall my $ftp = Net::FTP->new($host, Passive => 1, Timeout => 600); $ftp->login("anonymous", 'anonymous@example.com'); $ftp->cwd($path); $ftp->binary; $ftp->get($file) or (warn("$!\n"), return); $ftp->quit; } } elsif (my $ftp = $self->can_run('ftp')) { eval { # no Net::FTP, fallback to ftp.exe require FileHandle; my $fh = FileHandle->new; local $SIG{CHLD} = 'IGNORE'; unless ($fh->open("|$ftp -n")) { warn "Couldn't open ftp: $!\n"; chdir $dir; return; } my @dialog = split(/\n/, <<"END_FTP"); open $host user anonymous anonymous\@example.com cd $path binary get $file $file quit END_FTP foreach (@dialog) { $fh->print("$_\n") } $fh->close; } } else { warn "No working 'ftp' program available!\n"; chdir $dir; return; } unless (-f $file) { warn "Fetching failed: $@\n"; chdir $dir; return; } return if exists $args{size} and -s $file != $args{size}; system($args{run}) if exists $args{run}; unlink($file) if $args{remove}; print(((!exists $args{check_for} or -e $args{check_for}) ? "done!" : "failed! ($!)"), "\n"); chdir $dir; return !$?; } 1; prophet-0.750/inc/Module/Install/Include.pm000066400000000000000000000010151160607302300205260ustar00rootroot00000000000000#line 1 package Module::Install::Include; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub include { shift()->admin->include(@_); } sub include_deps { shift()->admin->include_deps(@_); } sub auto_include { shift()->admin->auto_include(@_); } sub auto_include_deps { shift()->admin->auto_include_deps(@_); } sub auto_include_dependent_dists { shift()->admin->auto_include_dependent_dists(@_); } 1; prophet-0.750/inc/Module/Install/Makefile.pm000066400000000000000000000270321160607302300206670ustar00rootroot00000000000000#line 1 package Module::Install::Makefile; use strict 'vars'; use ExtUtils::MakeMaker (); use Module::Install::Base (); use Fcntl qw/:flock :seek/; use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub Makefile { $_[0] } my %seen = (); sub prompt { shift; # Infinite loop protection my @c = caller(); if ( ++$seen{"$c[1]|$c[2]|$_[0]"} > 3 ) { die "Caught an potential prompt infinite loop ($c[1]|$c[2]|$_[0])"; } # In automated testing or non-interactive session, always use defaults if ( ($ENV{AUTOMATED_TESTING} or -! -t STDIN) and ! $ENV{PERL_MM_USE_DEFAULT} ) { local $ENV{PERL_MM_USE_DEFAULT} = 1; goto &ExtUtils::MakeMaker::prompt; } else { goto &ExtUtils::MakeMaker::prompt; } } # Store a cleaned up version of the MakeMaker version, # since we need to behave differently in a variety of # ways based on the MM version. my $makemaker = eval $ExtUtils::MakeMaker::VERSION; # If we are passed a param, do a "newer than" comparison. # Otherwise, just return the MakeMaker version. sub makemaker { ( @_ < 2 or $makemaker >= eval($_[1]) ) ? $makemaker : 0 } # Ripped from ExtUtils::MakeMaker 6.56, and slightly modified # as we only need to know here whether the attribute is an array # or a hash or something else (which may or may not be appendable). 
my %makemaker_argtype = ( C => 'ARRAY', CONFIG => 'ARRAY', # CONFIGURE => 'CODE', # ignore DIR => 'ARRAY', DL_FUNCS => 'HASH', DL_VARS => 'ARRAY', EXCLUDE_EXT => 'ARRAY', EXE_FILES => 'ARRAY', FUNCLIST => 'ARRAY', H => 'ARRAY', IMPORTS => 'HASH', INCLUDE_EXT => 'ARRAY', LIBS => 'ARRAY', # ignore '' MAN1PODS => 'HASH', MAN3PODS => 'HASH', META_ADD => 'HASH', META_MERGE => 'HASH', PL_FILES => 'HASH', PM => 'HASH', PMLIBDIRS => 'ARRAY', PMLIBPARENTDIRS => 'ARRAY', PREREQ_PM => 'HASH', CONFIGURE_REQUIRES => 'HASH', SKIP => 'ARRAY', TYPEMAPS => 'ARRAY', XS => 'HASH', # VERSION => ['version',''], # ignore # _KEEP_AFTER_FLUSH => '', clean => 'HASH', depend => 'HASH', dist => 'HASH', dynamic_lib=> 'HASH', linkext => 'HASH', macro => 'HASH', postamble => 'HASH', realclean => 'HASH', test => 'HASH', tool_autosplit => 'HASH', # special cases where you can use makemaker_append CCFLAGS => 'APPENDABLE', DEFINE => 'APPENDABLE', INC => 'APPENDABLE', LDDLFLAGS => 'APPENDABLE', LDFROM => 'APPENDABLE', ); sub makemaker_args { my ($self, %new_args) = @_; my $args = ( $self->{makemaker_args} ||= {} ); foreach my $key (keys %new_args) { if ($makemaker_argtype{$key}) { if ($makemaker_argtype{$key} eq 'ARRAY') { $args->{$key} = [] unless defined $args->{$key}; unless (ref $args->{$key} eq 'ARRAY') { $args->{$key} = [$args->{$key}] } push @{$args->{$key}}, ref $new_args{$key} eq 'ARRAY' ? @{$new_args{$key}} : $new_args{$key}; } elsif ($makemaker_argtype{$key} eq 'HASH') { $args->{$key} = {} unless defined $args->{$key}; foreach my $skey (keys %{ $new_args{$key} }) { $args->{$key}{$skey} = $new_args{$key}{$skey}; } } elsif ($makemaker_argtype{$key} eq 'APPENDABLE') { $self->makemaker_append($key => $new_args{$key}); } } else { if (defined $args->{$key}) { warn qq{MakeMaker attribute "$key" is overriden; use "makemaker_append" to append values\n}; } $args->{$key} = $new_args{$key}; } } return $args; } # For mm args that take multiple space-seperated args, # append an argument to the current list. sub makemaker_append { my $self = shift; my $name = shift; my $args = $self->makemaker_args; $args->{$name} = defined $args->{$name} ? join( ' ', $args->{$name}, @_ ) : join( ' ', @_ ); } sub build_subdirs { my $self = shift; my $subdirs = $self->makemaker_args->{DIR} ||= []; for my $subdir (@_) { push @$subdirs, $subdir; } } sub clean_files { my $self = shift; my $clean = $self->makemaker_args->{clean} ||= {}; %$clean = ( %$clean, FILES => join ' ', grep { length $_ } ($clean->{FILES} || (), @_), ); } sub realclean_files { my $self = shift; my $realclean = $self->makemaker_args->{realclean} ||= {}; %$realclean = ( %$realclean, FILES => join ' ', grep { length $_ } ($realclean->{FILES} || (), @_), ); } sub libs { my $self = shift; my $libs = ref $_[0] ? shift : [ shift ]; $self->makemaker_args( LIBS => $libs ); } sub inc { my $self = shift; $self->makemaker_args( INC => shift ); } sub _wanted_t { } sub tests_recursive { my $self = shift; my $dir = shift || 't'; unless ( -d $dir ) { die "tests_recursive dir '$dir' does not exist"; } my %tests = map { $_ => 1 } split / /, ($self->tests || ''); require File::Find; File::Find::find( sub { /\.t$/ and -f $_ and $tests{"$File::Find::dir/*.t"} = 1 }, $dir ); $self->tests( join ' ', sort keys %tests ); } sub write { my $self = shift; die "&Makefile->write() takes no arguments\n" if @_; # Check the current Perl version my $perl_version = $self->perl_version; if ( $perl_version ) { eval "use $perl_version; 1" or die "ERROR: perl: Version $] is installed, " . 
"but we need version >= $perl_version"; } # Make sure we have a new enough MakeMaker require ExtUtils::MakeMaker; if ( $perl_version and $self->_cmp($perl_version, '5.006') >= 0 ) { # MakeMaker can complain about module versions that include # an underscore, even though its own version may contain one! # Hence the funny regexp to get rid of it. See RT #35800 # for details. my $v = $ExtUtils::MakeMaker::VERSION =~ /^(\d+\.\d+)/; $self->build_requires( 'ExtUtils::MakeMaker' => $v ); $self->configure_requires( 'ExtUtils::MakeMaker' => $v ); } else { # Allow legacy-compatibility with 5.005 by depending on the # most recent EU:MM that supported 5.005. $self->build_requires( 'ExtUtils::MakeMaker' => 6.42 ); $self->configure_requires( 'ExtUtils::MakeMaker' => 6.42 ); } # Generate the MakeMaker params my $args = $self->makemaker_args; $args->{DISTNAME} = $self->name; $args->{NAME} = $self->module_name || $self->name; $args->{NAME} =~ s/-/::/g; $args->{VERSION} = $self->version or die <<'EOT'; ERROR: Can't determine distribution version. Please specify it explicitly via 'version' in Makefile.PL, or set a valid $VERSION in a module, and provide its file path via 'version_from' (or 'all_from' if you prefer) in Makefile.PL. EOT $DB::single = 1; if ( $self->tests ) { my @tests = split ' ', $self->tests; my %seen; $args->{test} = { TESTS => (join ' ', grep {!$seen{$_}++} @tests), }; } elsif ( $Module::Install::ExtraTests::use_extratests ) { # Module::Install::ExtraTests doesn't set $self->tests and does its own tests via harness. # So, just ignore our xt tests here. } elsif ( -d 'xt' and ($Module::Install::AUTHOR or $ENV{RELEASE_TESTING}) ) { $args->{test} = { TESTS => join( ' ', map { "$_/*.t" } grep { -d $_ } qw{ t xt } ), }; } if ( $] >= 5.005 ) { $args->{ABSTRACT} = $self->abstract; $args->{AUTHOR} = join ', ', @{$self->author || []}; } if ( $self->makemaker(6.10) ) { $args->{NO_META} = 1; #$args->{NO_MYMETA} = 1; } if ( $self->makemaker(6.17) and $self->sign ) { $args->{SIGN} = 1; } unless ( $self->is_admin ) { delete $args->{SIGN}; } if ( $self->makemaker(6.31) and $self->license ) { $args->{LICENSE} = $self->license; } my $prereq = ($args->{PREREQ_PM} ||= {}); %$prereq = ( %$prereq, map { @$_ } # flatten [module => version] map { @$_ } grep $_, ($self->requires) ); # Remove any reference to perl, PREREQ_PM doesn't support it delete $args->{PREREQ_PM}->{perl}; # Merge both kinds of requires into BUILD_REQUIRES my $build_prereq = ($args->{BUILD_REQUIRES} ||= {}); %$build_prereq = ( %$build_prereq, map { @$_ } # flatten [module => version] map { @$_ } grep $_, ($self->configure_requires, $self->build_requires) ); # Remove any reference to perl, BUILD_REQUIRES doesn't support it delete $args->{BUILD_REQUIRES}->{perl}; # Delete bundled dists from prereq_pm, add it to Makefile DIR my $subdirs = ($args->{DIR} || []); if ($self->bundles) { my %processed; foreach my $bundle (@{ $self->bundles }) { my ($mod_name, $dist_dir) = @$bundle; delete $prereq->{$mod_name}; $dist_dir = File::Basename::basename($dist_dir); # dir for building this module if (not exists $processed{$dist_dir}) { if (-d $dist_dir) { # List as sub-directory to be processed by make push @$subdirs, $dist_dir; } # Else do nothing: the module is already present on the system $processed{$dist_dir} = undef; } } } unless ( $self->makemaker('6.55_03') ) { %$prereq = (%$prereq,%$build_prereq); delete $args->{BUILD_REQUIRES}; } if ( my $perl_version = $self->perl_version ) { eval "use $perl_version; 1" or die "ERROR: perl: Version $] is installed, 
" . "but we need version >= $perl_version"; if ( $self->makemaker(6.48) ) { $args->{MIN_PERL_VERSION} = $perl_version; } } if ($self->installdirs) { warn qq{old INSTALLDIRS (probably set by makemaker_args) is overriden by installdirs\n} if $args->{INSTALLDIRS}; $args->{INSTALLDIRS} = $self->installdirs; } my %args = map { ( $_ => $args->{$_} ) } grep {defined($args->{$_} ) } keys %$args; my $user_preop = delete $args{dist}->{PREOP}; if ( my $preop = $self->admin->preop($user_preop) ) { foreach my $key ( keys %$preop ) { $args{dist}->{$key} = $preop->{$key}; } } my $mm = ExtUtils::MakeMaker::WriteMakefile(%args); $self->fix_up_makefile($mm->{FIRST_MAKEFILE} || 'Makefile'); } sub fix_up_makefile { my $self = shift; my $makefile_name = shift; my $top_class = ref($self->_top) || ''; my $top_version = $self->_top->VERSION || ''; my $preamble = $self->preamble ? "# Preamble by $top_class $top_version\n" . $self->preamble : ''; my $postamble = "# Postamble by $top_class $top_version\n" . ($self->postamble || ''); local *MAKEFILE; open MAKEFILE, "+< $makefile_name" or die "fix_up_makefile: Couldn't open $makefile_name: $!"; eval { flock MAKEFILE, LOCK_EX }; my $makefile = do { local $/; }; $makefile =~ s/\b(test_harness\(\$\(TEST_VERBOSE\), )/$1'inc', /; $makefile =~ s/( -I\$\(INST_ARCHLIB\))/ -Iinc$1/g; $makefile =~ s/( "-I\$\(INST_LIB\)")/ "-Iinc"$1/g; $makefile =~ s/^(FULLPERL = .*)/$1 "-Iinc"/m; $makefile =~ s/^(PERL = .*)/$1 "-Iinc"/m; # Module::Install will never be used to build the Core Perl # Sometimes PERL_LIB and PERL_ARCHLIB get written anyway, which breaks # PREFIX/PERL5LIB, and thus, install_share. Blank them if they exist $makefile =~ s/^PERL_LIB = .+/PERL_LIB =/m; #$makefile =~ s/^PERL_ARCHLIB = .+/PERL_ARCHLIB =/m; # Perl 5.005 mentions PERL_LIB explicitly, so we have to remove that as well. $makefile =~ s/(\"?)-I\$\(PERL_LIB\)\1//g; # XXX - This is currently unused; not sure if it breaks other MM-users # $makefile =~ s/^pm_to_blib\s+:\s+/pm_to_blib :: /mg; seek MAKEFILE, 0, SEEK_SET; truncate MAKEFILE, 0; print MAKEFILE "$preamble$makefile$postamble" or die $!; close MAKEFILE or die $!; 1; } sub preamble { my ($self, $text) = @_; $self->{preamble} = $text . $self->{preamble} if defined $text; $self->{preamble}; } sub postamble { my ($self, $text) = @_; $self->{postamble} ||= $self->admin->postamble; $self->{postamble} .= $text if defined $text; $self->{postamble} } 1; __END__ #line 541 prophet-0.750/inc/Module/Install/Metadata.pm000066400000000000000000000430201160607302300206650ustar00rootroot00000000000000#line 1 package Module::Install::Metadata; use strict 'vars'; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } my @boolean_keys = qw{ sign }; my @scalar_keys = qw{ name module_name abstract version distribution_type tests installdirs }; my @tuple_keys = qw{ configure_requires build_requires requires recommends bundles resources }; my @resource_keys = qw{ homepage bugtracker repository }; my @array_keys = qw{ keywords author }; *authors = \&author; sub Meta { shift } sub Meta_BooleanKeys { @boolean_keys } sub Meta_ScalarKeys { @scalar_keys } sub Meta_TupleKeys { @tuple_keys } sub Meta_ResourceKeys { @resource_keys } sub Meta_ArrayKeys { @array_keys } foreach my $key ( @boolean_keys ) { *$key = sub { my $self = shift; if ( defined wantarray and not @_ ) { return $self->{values}->{$key}; } $self->{values}->{$key} = ( @_ ? 
$_[0] : 1 ); return $self; }; } foreach my $key ( @scalar_keys ) { *$key = sub { my $self = shift; return $self->{values}->{$key} if defined wantarray and !@_; $self->{values}->{$key} = shift; return $self; }; } foreach my $key ( @array_keys ) { *$key = sub { my $self = shift; return $self->{values}->{$key} if defined wantarray and !@_; $self->{values}->{$key} ||= []; push @{$self->{values}->{$key}}, @_; return $self; }; } foreach my $key ( @resource_keys ) { *$key = sub { my $self = shift; unless ( @_ ) { return () unless $self->{values}->{resources}; return map { $_->[1] } grep { $_->[0] eq $key } @{ $self->{values}->{resources} }; } return $self->{values}->{resources}->{$key} unless @_; my $uri = shift or die( "Did not provide a value to $key()" ); $self->resources( $key => $uri ); return 1; }; } foreach my $key ( grep { $_ ne "resources" } @tuple_keys) { *$key = sub { my $self = shift; return $self->{values}->{$key} unless @_; my @added; while ( @_ ) { my $module = shift or last; my $version = shift || 0; push @added, [ $module, $version ]; } push @{ $self->{values}->{$key} }, @added; return map {@$_} @added; }; } # Resource handling my %lc_resource = map { $_ => 1 } qw{ homepage license bugtracker repository }; sub resources { my $self = shift; while ( @_ ) { my $name = shift or last; my $value = shift or next; if ( $name eq lc $name and ! $lc_resource{$name} ) { die("Unsupported reserved lowercase resource '$name'"); } $self->{values}->{resources} ||= []; push @{ $self->{values}->{resources} }, [ $name, $value ]; } $self->{values}->{resources}; } # Aliases for build_requires that will have alternative # meanings in some future version of META.yml. sub test_requires { shift->build_requires(@_) } sub install_requires { shift->build_requires(@_) } # Aliases for installdirs options sub install_as_core { $_[0]->installdirs('perl') } sub install_as_cpan { $_[0]->installdirs('site') } sub install_as_site { $_[0]->installdirs('site') } sub install_as_vendor { $_[0]->installdirs('vendor') } sub dynamic_config { my $self = shift; unless ( @_ ) { warn "You MUST provide an explicit true/false value to dynamic_config\n"; return $self; } $self->{values}->{dynamic_config} = $_[0] ? 1 : 0; return 1; } sub perl_version { my $self = shift; return $self->{values}->{perl_version} unless @_; my $version = shift or die( "Did not provide a value to perl_version()" ); # Normalize the version $version = $self->_perl_version($version); # We don't support the reall old versions unless ( $version >= 5.005 ) { die "Module::Install only supports 5.005 or newer (use ExtUtils::MakeMaker)\n"; } $self->{values}->{perl_version} = $version; } sub all_from { my ( $self, $file ) = @_; unless ( defined($file) ) { my $name = $self->name or die( "all_from called with no args without setting name() first" ); $file = join('/', 'lib', split(/-/, $name)) . '.pm'; $file =~ s{.*/}{} unless -e $file; unless ( -e $file ) { die("all_from cannot find $file from $name"); } } unless ( -f $file ) { die("The path '$file' does not exist, or is not a file"); } $self->{values}{all_from} = $file; # Some methods pull from POD instead of code. 
# If there is a matching .pod, use that instead my $pod = $file; $pod =~ s/\.pm$/.pod/i; $pod = $file unless -e $pod; # Pull the different values $self->name_from($file) unless $self->name; $self->version_from($file) unless $self->version; $self->perl_version_from($file) unless $self->perl_version; $self->author_from($pod) unless @{$self->author || []}; $self->license_from($pod) unless $self->license; $self->abstract_from($pod) unless $self->abstract; return 1; } sub provides { my $self = shift; my $provides = ( $self->{values}->{provides} ||= {} ); %$provides = (%$provides, @_) if @_; return $provides; } sub auto_provides { my $self = shift; return $self unless $self->is_admin; unless (-e 'MANIFEST') { warn "Cannot deduce auto_provides without a MANIFEST, skipping\n"; return $self; } # Avoid spurious warnings as we are not checking manifest here. local $SIG{__WARN__} = sub {1}; require ExtUtils::Manifest; local *ExtUtils::Manifest::manicheck = sub { return }; require Module::Build; my $build = Module::Build->new( dist_name => $self->name, dist_version => $self->version, license => $self->license, ); $self->provides( %{ $build->find_dist_packages || {} } ); } sub feature { my $self = shift; my $name = shift; my $features = ( $self->{values}->{features} ||= [] ); my $mods; if ( @_ == 1 and ref( $_[0] ) ) { # The user used ->feature like ->features by passing in the second # argument as a reference. Accomodate for that. $mods = $_[0]; } else { $mods = \@_; } my $count = 0; push @$features, ( $name => [ map { ref($_) ? ( ref($_) eq 'HASH' ) ? %$_ : @$_ : $_ } @$mods ] ); return @$features; } sub features { my $self = shift; while ( my ( $name, $mods ) = splice( @_, 0, 2 ) ) { $self->feature( $name, @$mods ); } return $self->{values}->{features} ? @{ $self->{values}->{features} } : (); } sub no_index { my $self = shift; my $type = shift; push @{ $self->{values}->{no_index}->{$type} }, @_ if $type; return $self->{values}->{no_index}; } sub read { my $self = shift; $self->include_deps( 'YAML::Tiny', 0 ); require YAML::Tiny; my $data = YAML::Tiny::LoadFile('META.yml'); # Call methods explicitly in case user has already set some values. while ( my ( $key, $value ) = each %$data ) { next unless $self->can($key); if ( ref $value eq 'HASH' ) { while ( my ( $module, $version ) = each %$value ) { $self->can($key)->($self, $module => $version ); } } else { $self->can($key)->($self, $value); } } return $self; } sub write { my $self = shift; return $self unless $self->is_admin; $self->admin->write_meta; return $self; } sub version_from { require ExtUtils::MM_Unix; my ( $self, $file ) = @_; $self->version( ExtUtils::MM_Unix->parse_version($file) ); # for version integrity check $self->makemaker_args( VERSION_FROM => $file ); } sub abstract_from { require ExtUtils::MM_Unix; my ( $self, $file ) = @_; $self->abstract( bless( { DISTNAME => $self->name }, 'ExtUtils::MM_Unix' )->parse_abstract($file) ); } # Add both distribution and module name sub name_from { my ($self, $file) = @_; if ( Module::Install::_read($file) =~ m/ ^ \s* package \s* ([\w:]+) \s* ; /ixms ) { my ($name, $module_name) = ($1, $1); $name =~ s{::}{-}g; $self->name($name); unless ( $self->module_name ) { $self->module_name($module_name); } } else { die("Cannot determine name from $file\n"); } } sub _extract_perl_version { if ( $_[0] =~ m/ ^\s* (?:use|require) \s* v? 
([\d_\.]+) \s* ; /ixms ) { my $perl_version = $1; $perl_version =~ s{_}{}g; return $perl_version; } else { return; } } sub perl_version_from { my $self = shift; my $perl_version=_extract_perl_version(Module::Install::_read($_[0])); if ($perl_version) { $self->perl_version($perl_version); } else { warn "Cannot determine perl version info from $_[0]\n"; return; } } sub author_from { my $self = shift; my $content = Module::Install::_read($_[0]); if ($content =~ m/ =head \d \s+ (?:authors?)\b \s* ([^\n]*) | =head \d \s+ (?:licen[cs]e|licensing|copyright|legal)\b \s* .*? copyright .*? \d\d\d[\d.]+ \s* (?:\bby\b)? \s* ([^\n]*) /ixms) { my $author = $1 || $2; # XXX: ugly but should work anyway... if (eval "require Pod::Escapes; 1") { # Pod::Escapes has a mapping table. # It's in core of perl >= 5.9.3, and should be installed # as one of the Pod::Simple's prereqs, which is a prereq # of Pod::Text 3.x (see also below). $author =~ s{ E<( (\d+) | ([A-Za-z]+) )> } { defined $2 ? chr($2) : defined $Pod::Escapes::Name2character_number{$1} ? chr($Pod::Escapes::Name2character_number{$1}) : do { warn "Unknown escape: E<$1>"; "E<$1>"; }; }gex; } elsif (eval "require Pod::Text; 1" && $Pod::Text::VERSION < 3) { # Pod::Text < 3.0 has yet another mapping table, # though the table name of 2.x and 1.x are different. # (1.x is in core of Perl < 5.6, 2.x is in core of # Perl < 5.9.3) my $mapping = ($Pod::Text::VERSION < 2) ? \%Pod::Text::HTML_Escapes : \%Pod::Text::ESCAPES; $author =~ s{ E<( (\d+) | ([A-Za-z]+) )> } { defined $2 ? chr($2) : defined $mapping->{$1} ? $mapping->{$1} : do { warn "Unknown escape: E<$1>"; "E<$1>"; }; }gex; } else { $author =~ s{E}{<}g; $author =~ s{E}{>}g; } $self->author($author); } else { warn "Cannot determine author info from $_[0]\n"; } } #Stolen from M::B my %license_urls = ( perl => 'http://dev.perl.org/licenses/', apache => 'http://apache.org/licenses/LICENSE-2.0', apache_1_1 => 'http://apache.org/licenses/LICENSE-1.1', artistic => 'http://opensource.org/licenses/artistic-license.php', artistic_2 => 'http://opensource.org/licenses/artistic-license-2.0.php', lgpl => 'http://opensource.org/licenses/lgpl-license.php', lgpl2 => 'http://opensource.org/licenses/lgpl-2.1.php', lgpl3 => 'http://opensource.org/licenses/lgpl-3.0.html', bsd => 'http://opensource.org/licenses/bsd-license.php', gpl => 'http://opensource.org/licenses/gpl-license.php', gpl2 => 'http://opensource.org/licenses/gpl-2.0.php', gpl3 => 'http://opensource.org/licenses/gpl-3.0.html', mit => 'http://opensource.org/licenses/mit-license.php', mozilla => 'http://opensource.org/licenses/mozilla1.1.php', open_source => undef, unrestricted => undef, restrictive => undef, unknown => undef, ); sub license { my $self = shift; return $self->{values}->{license} unless @_; my $license = shift or die( 'Did not provide a value to license()' ); $license = __extract_license($license) || lc $license; $self->{values}->{license} = $license; # Automatically fill in license URLs if ( $license_urls{$license} ) { $self->resources( license => $license_urls{$license} ); } return 1; } sub _extract_license { my $pod = shift; my $matched; return __extract_license( ($matched) = $pod =~ m/ (=head \d \s+ L(?i:ICEN[CS]E|ICENSING)\b.*?) (=head \d.*|=cut.*|)\z /xms ) || __extract_license( ($matched) = $pod =~ m/ (=head \d \s+ (?:C(?i:OPYRIGHTS?)|L(?i:EGAL))\b.*?) 
(=head \d.*|=cut.*|)\z /xms ); } sub __extract_license { my $license_text = shift or return; my @phrases = ( '(?:under )?the same (?:terms|license) as (?:perl|the perl (?:\d )?programming language)' => 'perl', 1, '(?:under )?the terms of (?:perl|the perl programming language) itself' => 'perl', 1, 'Artistic and GPL' => 'perl', 1, 'GNU general public license' => 'gpl', 1, 'GNU public license' => 'gpl', 1, 'GNU lesser general public license' => 'lgpl', 1, 'GNU lesser public license' => 'lgpl', 1, 'GNU library general public license' => 'lgpl', 1, 'GNU library public license' => 'lgpl', 1, 'GNU Free Documentation license' => 'unrestricted', 1, 'GNU Affero General Public License' => 'open_source', 1, '(?:Free)?BSD license' => 'bsd', 1, 'Artistic license' => 'artistic', 1, 'Apache (?:Software )?license' => 'apache', 1, 'GPL' => 'gpl', 1, 'LGPL' => 'lgpl', 1, 'BSD' => 'bsd', 1, 'Artistic' => 'artistic', 1, 'MIT' => 'mit', 1, 'Mozilla Public License' => 'mozilla', 1, 'Q Public License' => 'open_source', 1, 'OpenSSL License' => 'unrestricted', 1, 'SSLeay License' => 'unrestricted', 1, 'zlib License' => 'open_source', 1, 'proprietary' => 'proprietary', 0, ); while ( my ($pattern, $license, $osi) = splice(@phrases, 0, 3) ) { $pattern =~ s#\s+#\\s+#gs; if ( $license_text =~ /\b$pattern\b/i ) { return $license; } } return ''; } sub license_from { my $self = shift; if (my $license=_extract_license(Module::Install::_read($_[0]))) { $self->license($license); } else { warn "Cannot determine license info from $_[0]\n"; return 'unknown'; } } sub _extract_bugtracker { my @links = $_[0] =~ m#L<( \Qhttp://rt.cpan.org/\E[^>]+| \Qhttp://github.com/\E[\w_]+/[\w_]+/issues| \Qhttp://code.google.com/p/\E[\w_\-]+/issues/list )>#gx; my %links; @links{@links}=(); @links=keys %links; return @links; } sub bugtracker_from { my $self = shift; my $content = Module::Install::_read($_[0]); my @links = _extract_bugtracker($content); unless ( @links ) { warn "Cannot determine bugtracker info from $_[0]\n"; return 0; } if ( @links > 1 ) { warn "Found more than one bugtracker link in $_[0]\n"; return 0; } # Set the bugtracker bugtracker( $links[0] ); return 1; } sub requires_from { my $self = shift; my $content = Module::Install::_readperl($_[0]); my @requires = $content =~ m/^use\s+([^\W\d]\w*(?:::\w+)*)\s+([\d\.]+)/mg; while ( @requires ) { my $module = shift @requires; my $version = shift @requires; $self->requires( $module => $version ); } } sub test_requires_from { my $self = shift; my $content = Module::Install::_readperl($_[0]); my @requires = $content =~ m/^use\s+([^\W\d]\w*(?:::\w+)*)\s+([\d\.]+)/mg; while ( @requires ) { my $module = shift @requires; my $version = shift @requires; $self->test_requires( $module => $version ); } } # Convert triple-part versions (eg, 5.6.1 or 5.8.9) to # numbers (eg, 5.006001 or 5.008009). # Also, convert double-part versions (eg, 5.8) sub _perl_version { my $v = $_[-1]; $v =~ s/^([1-9])\.([1-9]\d?\d?)$/sprintf("%d.%03d",$1,$2)/e; $v =~ s/^([1-9])\.([1-9]\d?\d?)\.(0|[1-9]\d?\d?)$/sprintf("%d.%03d%03d",$1,$2,$3 || 0)/e; $v =~ s/(\.\d\d\d)000$/$1/; $v =~ s/_.+$//; if ( ref($v) ) { # Numify $v = $v + 0; } return $v; } sub add_metadata { my $self = shift; my %hash = @_; for my $key (keys %hash) { warn "add_metadata: $key is not prefixed with 'x_'.\n" . 
"Use appopriate function to add non-private metadata.\n" unless $key =~ /^x_/; $self->{values}->{$key} = $hash{$key}; } } ###################################################################### # MYMETA Support sub WriteMyMeta { die "WriteMyMeta has been deprecated"; } sub write_mymeta_yaml { my $self = shift; # We need YAML::Tiny to write the MYMETA.yml file unless ( eval { require YAML::Tiny; 1; } ) { return 1; } # Generate the data my $meta = $self->_write_mymeta_data or return 1; # Save as the MYMETA.yml file print "Writing MYMETA.yml\n"; YAML::Tiny::DumpFile('MYMETA.yml', $meta); } sub write_mymeta_json { my $self = shift; # We need JSON to write the MYMETA.json file unless ( eval { require JSON; 1; } ) { return 1; } # Generate the data my $meta = $self->_write_mymeta_data or return 1; # Save as the MYMETA.yml file print "Writing MYMETA.json\n"; Module::Install::_write( 'MYMETA.json', JSON->new->pretty(1)->canonical->encode($meta), ); } sub _write_mymeta_data { my $self = shift; # If there's no existing META.yml there is nothing we can do return undef unless -f 'META.yml'; # We need Parse::CPAN::Meta to load the file unless ( eval { require Parse::CPAN::Meta; 1; } ) { return undef; } # Merge the perl version into the dependencies my $val = $self->Meta->{values}; my $perl = delete $val->{perl_version}; if ( $perl ) { $val->{requires} ||= []; my $requires = $val->{requires}; # Canonize to three-dot version after Perl 5.6 if ( $perl >= 5.006 ) { $perl =~ s{^(\d+)\.(\d\d\d)(\d*)}{join('.', $1, int($2||0), int($3||0))}e } unshift @$requires, [ perl => $perl ]; } # Load the advisory META.yml file my @yaml = Parse::CPAN::Meta::LoadFile('META.yml'); my $meta = $yaml[0]; # Overwrite the non-configure dependency hashs delete $meta->{requires}; delete $meta->{build_requires}; delete $meta->{recommends}; if ( exists $val->{requires} ) { $meta->{requires} = { map { @$_ } @{ $val->{requires} } }; } if ( exists $val->{build_requires} ) { $meta->{build_requires} = { map { @$_ } @{ $val->{build_requires} } }; } return $meta; } 1; prophet-0.750/inc/Module/Install/Share.pm000066400000000000000000000046331160607302300202160ustar00rootroot00000000000000#line 1 package Module::Install::Share; use strict; use Module::Install::Base (); use File::Find (); use ExtUtils::Manifest (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub install_share { my $self = shift; my $dir = @_ ? pop : 'share'; my $type = @_ ? shift : 'dist'; unless ( defined $type and $type eq 'module' or $type eq 'dist' ) { die "Illegal or invalid share dir type '$type'"; } unless ( defined $dir and -d $dir ) { require Carp; Carp::croak("Illegal or missing directory install_share param"); } # Split by type my $S = ($^O eq 'MSWin32') ? "\\" : "\/"; my $root; if ( $type eq 'dist' ) { die "Too many parameters to install_share" if @_; # Set up the install $root = "\$(INST_LIB)${S}auto${S}share${S}dist${S}\$(DISTNAME)"; } else { my $module = Module::Install::_CLASS($_[0]); unless ( defined $module ) { die "Missing or invalid module name '$_[0]'"; } $module =~ s/::/-/g; $root = "\$(INST_LIB)${S}auto${S}share${S}module${S}$module"; } my $manifest = -r 'MANIFEST' ? ExtUtils::Manifest::maniread() : undef; my $skip_checker = $ExtUtils::Manifest::VERSION >= 1.54 ? ExtUtils::Manifest::maniskip() : ExtUtils::Manifest::_maniskip(); my $postamble = ''; my $perm_dir = eval($ExtUtils::MakeMaker::VERSION) >= 6.52 ? 
'$(PERM_DIR)' : 755; File::Find::find({ no_chdir => 1, wanted => sub { my $path = File::Spec->abs2rel($_, $dir); if (-d $_) { return if $skip_checker->($File::Find::name); $postamble .=<<"END"; \t\$(NOECHO) \$(MKPATH) "$root${S}$path" \t\$(NOECHO) \$(CHMOD) $perm_dir "$root${S}$path" END } else { return if ref $manifest && !exists $manifest->{$File::Find::name}; return if $skip_checker->($File::Find::name); $postamble .=<<"END"; \t\$(NOECHO) \$(CP) "$dir${S}$path" "$root${S}$path" END } }, }, $dir); # Set up the install $self->postamble(<<"END_MAKEFILE"); config :: $postamble END_MAKEFILE # The above appears to behave incorrectly when used with old versions # of ExtUtils::Install (known-bad on RHEL 3, with 5.8.0) # So when we need to install a share directory, make sure we add a # dependency on a moderately new version of ExtUtils::MakeMaker. $self->build_requires( 'ExtUtils::MakeMaker' => '6.11' ); # 99% of the time we don't want to index a shared dir $self->no_index( directory => $dir ); } 1; __END__ #line 154 prophet-0.750/inc/Module/Install/Win32.pm000066400000000000000000000034031160607302300200500ustar00rootroot00000000000000#line 1 package Module::Install::Win32; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } # determine if the user needs nmake, and download it if needed sub check_nmake { my $self = shift; $self->load('can_run'); $self->load('get_file'); require Config; return unless ( $^O eq 'MSWin32' and $Config::Config{make} and $Config::Config{make} =~ /^nmake\b/i and ! $self->can_run('nmake') ); print "The required 'nmake' executable not found, fetching it...\n"; require File::Basename; my $rv = $self->get_file( url => 'http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe', ftp_url => 'ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe', local_dir => File::Basename::dirname($^X), size => 51928, run => 'Nmake15.exe /o > nul', check_for => 'Nmake.exe', remove => 1, ); die <<'END_MESSAGE' unless $rv; ------------------------------------------------------------------------------- Since you are using Microsoft Windows, you will need the 'nmake' utility before installation. It's available at: http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe or ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe Please download the file manually, save it to a directory in %PATH% (e.g. C:\WINDOWS\COMMAND\), then launch the MS-DOS command line shell, "cd" to that directory, and run "Nmake15.exe" from there; that will create the 'nmake.exe' file needed by this module. You may then resume the installation process described in README. ------------------------------------------------------------------------------- END_MESSAGE } 1; prophet-0.750/inc/Module/Install/WriteAll.pm000066400000000000000000000023761160607302300207010ustar00rootroot00000000000000#line 1 package Module::Install::WriteAll; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.00'; @ISA = qw{Module::Install::Base}; $ISCORE = 1; } sub WriteAll { my $self = shift; my %args = ( meta => 1, sign => 0, inline => 0, check_nmake => 1, @_, ); $self->sign(1) if $args{sign}; $self->admin->WriteAll(%args) if $self->is_admin; $self->check_nmake if $args{check_nmake}; unless ( $self->makemaker_args->{PL_FILES} ) { # XXX: This still may be a bit over-defensive... 
unless ($self->makemaker(6.25)) { $self->makemaker_args( PL_FILES => {} ) if -f 'Build.PL'; } } # Until ExtUtils::MakeMaker support MYMETA.yml, make sure # we clean it up properly ourself. $self->realclean_files('MYMETA.yml'); if ( $args{inline} ) { $self->Inline->write; } else { $self->Makefile->write; } # The Makefile write process adds a couple of dependencies, # so write the META.yml files after the Makefile. if ( $args{meta} ) { $self->Meta->write; } # Experimental support for MYMETA if ( $ENV{X_MYMETA} ) { if ( $ENV{X_MYMETA} eq 'JSON' ) { $self->Meta->write_mymeta_json; } else { $self->Meta->write_mymeta_yaml; } } return 1; } 1; prophet-0.750/lib/000077500000000000000000000000001160607302300137525ustar00rootroot00000000000000prophet-0.750/lib/Prophet.pm000066400000000000000000000014031160607302300157270ustar00rootroot00000000000000use warnings; use strict; package Prophet; our $VERSION = '0.750'; =head1 NAME Prophet =head1 DESCRIPTION Prophet is a distributed database system designed for small to medium scale social database applications. Our early targets include things such as bug tracking. =head2 Design goals =over =item Arbitrary record schema =item Replication =item Disconnected operation =item Peer to peer synchronization =back =head2 Design constraints =over =item Scaling We don't currently intend for the first implementation of Prophet to scale to databases with millions of rows or hundreds of concurrent users. There's nothing that makes the design infeasible, but the infrastructure necessary for such a system will...needlessly hamstring it. =back =cut 1; prophet-0.750/lib/Prophet/000077500000000000000000000000001160607302300153735ustar00rootroot00000000000000prophet-0.750/lib/Prophet/App.pm000066400000000000000000000137621160607302300164620ustar00rootroot00000000000000package Prophet::App; use Any::Moose; use File::Spec (); use Prophet::Config; use Prophet::UUIDGenerator; use Params::Validate qw/validate validate_pos/; has handle => ( is => 'rw', isa => 'Prophet::Replica', lazy => 1, default => sub { my $self = shift; if ( defined $self->local_replica_url && $self->local_replica_url !~ /^[\w\+]{2,}\:/ ) { # the reason why we need {2,} is to not match name on windows, e.g. C:\foo my $path = $self->local_replica_url; $path = File::Spec->rel2abs(glob($path)) unless File::Spec->file_name_is_absolute($path); $self->local_replica_url("file://$path"); } return Prophet::Replica->get_handle( url => $self->local_replica_url, app_handle => $self, ); }, ); has config => ( is => 'rw', isa => 'Prophet::Config', default => sub { my $self = shift; return Prophet::Config->new( app_handle => $self, confname => 'prophetrc', ); }, documentation => "This is the config instance for the running application", ); use constant DEFAULT_REPLICA_TYPE => 'prophet'; =head1 NAME Prophet::App =head1 SYNOPSIS =head1 METHODS =head2 BUILD =cut =head2 default_replica_type Returns a string of the the default replica type for this application. =cut sub default_replica_type { my $self = shift; return $ENV{'PROPHET_REPLICA_TYPE'} || DEFAULT_REPLICA_TYPE; } =head2 local_replica_url Returns the URL of the current local replica. If no URL has been provided (usually via C<$ENV{PROPHET_REPO}>), returns undef. 
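A minimal usage sketch (the replica path below is purely illustrative):

    use Prophet::App;

    # local_replica_url() reads the PROPHET_REPO environment variable,
    # and sets it when passed an argument.
    local $ENV{PROPHET_REPO} = '/tmp/my-replica';
    my $app = Prophet::App->new;
    print $app->local_replica_url;    # prints "/tmp/my-replica"
    # With PROPHET_REPO unset, local_replica_url() returns undef.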
=cut sub local_replica_url { my $self = shift; if (@_) { $ENV{'PROPHET_REPO'} = shift; } return $ENV{'PROPHET_REPO'} || undef; } =head2 require =cut sub require { my $self = shift; my $class = shift; $self->_require(module => $class); } =head2 try_to_require =cut sub try_to_require { my $self = shift; my $class = shift; $self->_require(module => $class, quiet => 1); } =head2 _require =cut sub _require { my $self = shift; my %args = ( module => undef, quiet => undef, @_); my $class = $args{'module'}; # Quick hack to silence warnings. # Maybe some dependencies were lost. unless ($class) { warn sprintf("no class was given at %s line %d\n", (caller)[1,2]); return 0; } return 1 if $self->already_required($class); # .pm might already be there in a weird interaction in Module::Pluggable my $file = $class; $file .= ".pm" unless $file =~ /\.pm$/; $file =~ s/::/\//g; my $retval = eval { local $SIG{__DIE__} = 'DEFAULT'; CORE::require "$file" }; my $error = $@; if (my $message = $error) { $message =~ s/ at .*?\n$//; if ($args{'quiet'} and $message =~ /^Can't locate \Q$file\E/) { return 0; } elsif ( $error !~ /^Can't locate $file/) { die $error; } else { warn sprintf("$message at %s line %d\n", (caller(1))[1,2]); return 0; } } return 1; } =head2 already_required class Helper function to test whether a given class has already been require'd. =cut sub already_required { my ($self, $class) = @_; return 0 if $class =~ /::$/; # malformed class my $path = join('/', split(/::/,$class)).".pm"; return ( $INC{$path} ? 1 : 0); } sub set_db_defaults { my $self = shift; my $settings = $self->database_settings; for my $name ( keys %$settings ) { my ($uuid, @metadata) = @{$settings->{$name}}; my $s = $self->setting( label => $name, uuid => $uuid, default => \@metadata, ); $s->initialize; } } sub setting { my $self = shift; my %args = validate( @_, { uuid => 0, default => 0, label => 0 } ); require Prophet::DatabaseSetting; my ($uuid, $default); if ( $args{uuid} ) { $uuid = $args{'uuid'}; $default = $args{'default'}; } elsif ( $args{'label'} ) { ($uuid, $default) = @{ $self->database_settings->{ $args{'label'} }}; } return Prophet::DatabaseSetting->new( handle => $self->handle, uuid => $uuid, default => $default, label => $args{label} ); } sub database_settings { {} } # XXX wants a better name =head3 log $MSG Logs the given message to C (but only if the C environmental variable is set). =cut sub log_debug { my $self = shift; return unless ($ENV{'PROPHET_DEBUG'}); $self->log(@_); } sub log { my $self = shift; my ($msg) = validate_pos(@_, 1); print STDERR $msg."\n";# if ($ENV{'PROPHET_DEBUG'}); } =head2 log_fatal $MSG Logs the given message and dies with a stack trace. =cut sub log_fatal { my $self = shift; # always skip this fatal_error function when generating a stack trace local $Carp::CarpLevel = $Carp::CarpLevel + 1; $self->log(@_); Carp::confess(@_); } sub current_user_email { my $self = shift; return $self->config->get( key => 'user.email-address' ) || $ENV{'PROPHET_EMAIL'} || $ENV{'EMAIL'}; } =head2 display_name_for_replica UUID Returns a "friendly" id for the replica with the given uuid. UUIDs are for computers, friendly names are for people. If no name is found, the friendly name is just the UUID. =cut # friendly names are replica subsections in the config file use Memoize; memoize('display_name_for_replica'); sub display_name_for_replica { my $self = shift; my $uuid = shift; return 'Unknown replica!' 
unless $uuid; my %possibilities = $self->config->get_regexp( key => '^replica\..*\.uuid$' ); # form a hash of uuid -> name my %sources_by_uuid = map { my $uuid = $possibilities{$_}; $_ =~ /^replica\.(.*)\.uuid$/; my $name = $1; ( $uuid => $name ); } keys %possibilities; return exists $sources_by_uuid{$uuid} ? $sources_by_uuid{$uuid} : $uuid; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI.pm000066400000000000000000000136531160607302300163500ustar00rootroot00000000000000package Prophet::CLI; use Any::Moose; use Prophet; use Prophet::Replica; use Prophet::CLI::Command; use Prophet::CLI::Dispatcher; use Prophet::CLIContext; use Prophet::Record; use List::Util 'first'; use Text::ParseWords qw(shellwords); has app_class => ( is => 'rw', isa => 'ClassName', default => 'Prophet::App', ); has dispatcher_class => ( is => 'rw', isa => 'ClassName', lazy => 1, default => sub { my $self = shift; my $app_class = $self->app_class; my $class = $app_class .'::CLI::Dispatcher'; return $class if $app_class->try_to_require( $class ); return 'Prophet::CLI::Dispatcher'; }, ); has record_class => ( is => 'rw', isa => 'ClassName', lazy => 1, default => 'Prophet::Record', ); has app_handle => ( is => 'rw', isa => 'Prophet::App', lazy => 1, handles => [qw/handle config/], default => sub { return $_[0]->app_class->new; }, ); has context => ( is => 'rw', isa => 'Prophet::CLIContext', lazy => 1, default => sub { return Prophet::CLIContext->new( app_handle => shift->app_handle); } ); has interactive_shell => ( is => 'rw', isa => 'Bool', default => 0, ); # default line length for CLI-related things that ought to wrap use constant LINE_LENGTH => 80; =head2 _record_cmd handles the subcommand for a particular type =cut =head2 dispatcher_class -> Class Returns class name of the dispatcher used to dispatch command lines. By default app_class::CLI::Dispatcher is used if it can be loaded otherwise L. Override using: has '+dispatcher_class' => ( default => 'MyApp::Dispatcher' ); =head2 run_one_command Runs a command specified by commandline arguments given in an ARGV-like array of argumnents and key value pairs . To use in a commandline front-end, create a L object and pass in your main app class as app_class, then run this routine. Example: my $cli = Prophet::CLI->new({ app_class => 'App::SD' }); $cli->run_one_command(@ARGV); =cut sub run_one_command { my $self = shift; my @args = (@_); # find the first alias that matches, rerun the aliased cmd # note: keys of aliases are treated as regex, # we need to substitute $1, $2 ... in the value if there's any my $ori_cmd = join ' ', @args; if ($self->app_handle->local_replica_url) { my $aliases = $self->app_handle->config->aliases; while (my ($alias, $replacement) = each %$aliases ) { my $command = $self->_command_matches_alias( \@args, $alias, $replacement, ) || next; # we don't want to recursively call if people stupidly write # alias pull --local = pull --local next if ( join(' ', @$command) eq $ori_cmd ); return $self->run_one_command(@$command); } } # really, we shouldn't be doing this stuff from the command dispatcher $self->context( Prophet::CLIContext->new( app_handle => $self->app_handle ) ); $self->context->setup_from_args(@args); my $dispatcher = $self->dispatcher_class->new; # Path::Dispatcher is string-based, so we need to join the args # hash with spaces before passing off (args with whitespace in # them are quoted, double quotes are escaped) my $dispatch_command_string = join(' ', map { s/"/\\"/g; # escape double quotes /\s/ ? 
qq{"$_"} : $_; } @{ $self->context->primary_commands }); local $Prophet::CLI::Dispatcher::cli = $self; my $dispatch = $dispatcher->dispatch( $dispatch_command_string ); $self->start_pager(); $dispatch->run($dispatcher); $self->end_pager(); } sub _command_matches_alias { my $self = shift; my @words = @{+shift}; my @alias = shellwords(shift); my @expansion = shellwords(shift); # Compare @words against @alias return if(scalar(@words) < scalar(@alias)); while(@alias) { if(shift @words ne shift @alias) { return; } } # @words now contains the remaining words given on the # command-line, and @expansion contains the words in the # expansion. if (first sub {m{\$\d+\b}}, @expansion) { # Expand $n placeholders for (@expansion) { s/\$(\d+)\b/$words[$1 - 1]||""/ge; } return [@expansion]; } else { return [@expansion, @words]; } } sub is_interactive { return -t STDIN && -t STDOUT; } sub get_pager { my $self = shift; return $ENV{'PAGER'} || `which less` || `which more`; } our $ORIGINAL_STDOUT; sub start_pager { my $self = shift; my $content = shift; if (is_interactive() && !$ORIGINAL_STDOUT) { local $ENV{'LESS'} ||= '-FXe'; local $ENV{'MORE'}; $ENV{'MORE'} ||= '-FXe' unless $^O =~ /^MSWin/; my $pager = $self->get_pager(); return unless $pager; open (my $cmd, "|-", $pager) || return; $|++; $ORIGINAL_STDOUT = *STDOUT; # $pager will be closed once we restore STDOUT to $ORIGINAL_STDOUT *STDOUT = $cmd; } } sub in_pager { return $ORIGINAL_STDOUT ? 1 :0; } sub end_pager { my $self = shift; return unless ($self->in_pager); *STDOUT = $ORIGINAL_STDOUT ; # closes the pager $ORIGINAL_STDOUT = undef; } =head2 get_script_name Return the name of the script that was run. This is the empty string if we're in a shell, otherwise the script name concatenated with a space character. This is so you can just use this for e.g. printing usage messages or help docs that might be run from either a shell or the command line. =cut sub get_script_name { my $self = shift; return '' if $self->interactive_shell; require File::Spec; my ($cmd) = ( File::Spec->splitpath($0) )[2]; return $cmd . 
' '; } END { *STDOUT = $ORIGINAL_STDOUT if $ORIGINAL_STDOUT; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/000077500000000000000000000000001160607302300160025ustar00rootroot00000000000000prophet-0.750/lib/Prophet/CLI/CollectionCommand.pm000066400000000000000000000011211160607302300217250ustar00rootroot00000000000000package Prophet::CLI::CollectionCommand; use Any::Moose 'Role'; with 'Prophet::CLI::RecordCommand'; use Params::Validate; sub get_collection_object { my $self = shift; my %args = validate(@_, { type => { default => $self->type }, }); my $class = $self->_get_record_object(type => $args{type})->collection_class; Prophet::App->require($class); my $records = $class->new( app_handle => $self->app_handle, handle => $self->handle, type => $args{type} || $self->type, ); return $records; } no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/Command.pm000066400000000000000000000234561160607302300177300ustar00rootroot00000000000000package Prophet::CLI::Command; use Any::Moose; use Prophet::CLI; use Params::Validate qw(validate); has cli => ( is => 'rw', isa => 'Prophet::CLI', weak_ref => 1, handles => [ qw/app_handle handle config/, ], ); has context => ( is => 'rw', isa => 'Prophet::CLIContext', handles => [ qw/args set_arg arg has_arg delete_arg arg_names/, qw/props set_prop prop has_prop delete_prop prop_names/, 'add_to_prop_set', 'prop_set', ], ); has editor_var => ( is => 'rw', isa => 'Str', default => 'PROPHET_EDITOR', ); sub ARG_TRANSLATIONS { my $self = shift; return ( 'v' => 'verbose', 'a' => 'all' ); } =head2 Registering argument translations This is the Prophet CLI's way of supporting short forms for arguments, e.g. you want to let '-v' be able to used for the same purpose as '--verbose' without dirtying your code checking both or manually setting them if they exist. We want it to be as easy as possible to have short commands. To use, have your command subclass do: sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), f => 'file' }; You can register as many translations at a time as you want. The arguments will be translated when the command object is instantiated. If an arg already exists in the arg translation table, it is overwritten with the new value. =cut sub _translate_args { my $self = shift; my %translations = $self->ARG_TRANSLATIONS; for my $arg (keys %translations) { $self->set_arg($translations{$arg}, $self->arg($arg)) if $self->has_arg($arg); } } # run arg translations on object instantiation sub BUILD { my $self = shift; $self->_translate_args(); return $self; } sub fatal_error { my $self = shift; my $reason = shift; # always skip this fatal_error function when generating a stack trace local $Carp::CarpLevel = $Carp::CarpLevel + 1; die $reason . "\n"; } =head2 require_uuid Checks to make sure the uuid attribute is set. Prints an error and dies with the command's usage string if it is not set. =cut sub require_uuid { my $self = shift; if (!$self->has_uuid) { my $type = $self->type; my $name = (split /::/, $self->meta->name)[-1]; warn "No UUID or LUID given!\n"; $self->print_usage; } } =head2 edit_text [text] -> text Filters the given text through the user's C<$EDITOR> using L. If C<$ENV{$self-Eeditor_var}> is specified (C<$self-Eeditor_var> defaults to PROPHET_EDITOR), it is favored over C<$EDITOR>. =cut sub edit_text { my $self = shift; my $text = shift; # don't invoke the editor in a script, the test will appear to hang #die "Tried to invoke an editor in a test script!" 
if $ENV{IN_PROPHET_TEST_COMMAND}; require Proc::InvokeEditor; my $pi = Proc::InvokeEditor->new; my $editors = $pi->editors; my $editor = $ENV{$self->editor_var}; unshift @$editors, $editor if defined $editor; $pi->editors($editors); return scalar $pi->edit($text); } =head2 edit_hash hash => hashref, ordering => arrayref Filters the hash through the user's C<$EDITOR> using L. If C<$ENV{$self-Eeditor_var}> is specified (C<$self-Eeditor_var> defaults to PROPHET_EDITOR), it is favored over C<$EDITOR>. No validation is done on the input or output. If the optional ordering argument is specified, hash keys will be presented in that order (with unspecified elements following) for edit. If the record class for the current type defines a C routine, those props will not be presented for editing. False values are not returned unless a prop is removed from the output. =cut sub edit_hash { my $self = shift; validate( @_, { hash => 1, ordering => 0 } ); my %args = @_; my $hash = $args{'hash'}; my @ordering = @{ $args{'ordering'} || [] }; my $record = $self->_get_record_object; my @do_not_edit = $record->can('immutable_props') ? $record->immutable_props : (); if (@ordering) { # add any keys not in @ordering to the end of it my %keys_in_ordering; map { $keys_in_ordering{$_} = 1 if exists($hash->{$_}) } @ordering; map { push @ordering, $_ if !exists($keys_in_ordering{$_}) } keys %$hash; } else { @ordering = sort keys %$hash; } # filter out props we don't want to present for editing my %do_not_edit = map { $_ => 1 } @do_not_edit; @ordering = grep { !$do_not_edit{$_} } @ordering; my $input = join "\n", map { "$_: $hash->{$_}" } @ordering; my $output = $self->edit_text($input); die "Aborted.\n" if $input eq $output; # parse the output my $filtered = {}; for my $line (split "\n", $output) { if ($line =~ m/^([^:]+):\s*(.*)$/) { my $prop = $1; my $val = $2; # don't return empty values $filtered->{$prop} = $val unless !($val); } } no warnings 'uninitialized'; # if a key is deleted intentionally, set its value to '' for my $prop (keys %$hash) { if (!exists $filtered->{$prop} and ! exists $do_not_edit{$prop}) { $filtered->{$prop} = ''; } } # filter out unchanged keys as they clutter changesets if they're set again map { delete $filtered->{$_} if $hash->{$_} eq $filtered->{$_} } keys %$filtered; return $filtered; } =head2 edit_props arg => str, defaults => hashref, ordering => arrayref Returns a hashref of the command's props mixed in with any default props. If the "arg" argument is specified, (default "edit", use C if you only want default arguments), then L is invoked on the property list. If the C argument is specified, properties will be presented in that order (with unspecified props following) if filtered through L. =cut sub edit_props { my $self = shift; my %args = @_; my $arg = $args{'arg'} || 'edit'; my $defaults = $args{'defaults'}; my %props; if ($defaults) { %props = (%{ $defaults }, %{ $self->props }); } else { %props = %{$self->props}; } if ($self->has_arg($arg)) { return $self->edit_hash(hash => \%props, ordering => $args{'ordering'}); } return \%props; } =head2 prompt_choices question Asks user the question and returns 0 if answer was the second choice, 1 otherwise. (First choice is the default.) 
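A minimal sketch, assuming C<$self> is some Prophet::CLI::Command
subclass instance (the question text is invented for illustration):

    # Shown to the user as "Apply these changes? [Y/n]: ".  Returns
    # false only when the answer matches the second choice ("n",
    # case-insensitively); anything else, including just pressing
    # enter, selects the default first choice and returns true.
    my $proceed = $self->prompt_choices( 'y', 'n', 'Apply these changes?' );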
=cut sub prompt_choices { my $self = shift; my ($choice1, $choice2, $question) = @_; $choice1 = uc $choice1; # default is capsed $choice2 = lc $choice2; # non-default is lowercased Prophet::CLI->end_pager(); print "$question [$choice1/$choice2]: "; chomp( my $answer = ); Prophet::CLI->start_pager(); return $answer !~ /^$choice2$/i; } =head2 prompt_Yn question Asks user the question and returns true if answer was positive or false otherwise. Default answer is 'Yes' (returns true). =cut sub prompt_Yn { my $self = shift; my $msg = shift; return $self->prompt_choices( 'y', 'n', $msg ); } # Create a new [replica] config file section for the given replica if # it hasn't been seen before (config section doesn't already exist) sub record_replica_in_config { my $self = shift; my $replica_url = shift; my $replica_uuid = shift; my $url_variable = shift || 'url'; my %previous_sources_by_uuid = $self->app_handle->config->sources( by_variable => 1, variable => 'uuid', ); my $found_prev_replica = $previous_sources_by_uuid{$replica_uuid}; if ( !$found_prev_replica ) { # replica section doesn't exist at all; create a new one my $url = $replica_url; $self->app_handle->config->group_set( $self->app_handle->config->replica_config_file, [ { key => "replica.$url.$url_variable", value => $replica_url, }, { key => "replica.$url.uuid", value => $replica_uuid, }, ], ); } elsif ( $found_prev_replica ne $replica_url ) { # We're publishing to a different place than where it was published # to previously--we don't want to end up with a multivalue in the # config file, so just replace the old value. my $name = $self->app_handle->display_name_for_replica($replica_uuid); $self->app_handle->config->set( filename => $self->app_handle->config->replica_config_file, key => "replica.$name.$url_variable", value => $replica_url, ); } } =head2 print_usage Print the command's usage message to STDERR and die. Commands should implement C, which returns the usage message. If the usage message method needs arguments passed in, use a closure. =cut sub print_usage { my $self = shift; my %args = ( usage_method => sub { $self->usage_msg }, @_, ); die $args{usage_method}(); } =head2 get_cmd_and_subcmd_names [no_type => 1] Gets the name of the script that was run and the primary commands that were specified on the command-line. If a true boolean is passed in as C, won't add '' to the subcmd if no type was passed in via the primary commands. 
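A short sketch of how a command's C<usage_msg> typically uses this,
mirroring the pattern in commands such as C<delete> and C<history>
(the C<< <record-id> >> placeholder is invented for illustration):

    sub usage_msg {
        my $self = shift;
        my ( $cmd, $type_and_subcmd ) = $self->get_cmd_and_subcmd_names;
        return "usage: ${cmd}${type_and_subcmd} <record-id>\n";
    }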
=cut sub get_cmd_and_subcmd_names { my $self = shift; my %args = @_; my $cmd = $self->cli->get_script_name; my @primary_commands = @{ $self->context->primary_commands }; # if primary commands was only length 1, the type was not specified # and we should indicate that a type is expected push @primary_commands, '' if @primary_commands <= 1 && !$args{no_type}; my $type_and_subcmd = join( q{ }, @primary_commands ); return ($cmd, $type_and_subcmd); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/000077500000000000000000000000001160607302300173605ustar00rootroot00000000000000prophet-0.750/lib/Prophet/CLI/Command/Aliases.pm000066400000000000000000000114431160607302300213020ustar00rootroot00000000000000package Prophet::CLI::Command::Aliases; use Any::Moose; use Params::Validate qw/validate/; extends 'Prophet::CLI::Command::Config'; sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), s => 'show' }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}aliases [show] ${cmd}aliases edit [--global|--user] ${cmd}alias [] END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); my $config = $self->config; my $template = $self->make_template; # alias.pull --from http://foo-bar.com/ # add is the same as set if ( $self->context->has_arg('add') && !$self->has_arg('set') ) { $self->context->set_arg('set', $self->arg('add') ) } if ( ! ( $self->has_arg('set') || $self->has_arg('delete') || $self->has_arg('edit') ) ) { print $template. "\n"; return; } else { $self->set_arg('set', 'alias.'.$self->arg('set')) if $self->has_arg('set'); $self->set_arg('delete', 'alias.'.$self->arg('delete')) if $self->has_arg('delete'); $self->SUPER::run(@_); } } sub make_template { my $self = shift; my $content = ''; $content .= $self->context->has_arg('edit') ? "# Editing aliases in config file ".$self->config_filename."\n\n" ."# Format: new_cmd = cmd\n" : "Active aliases for the current repository (including user-wide and" ." global\naliases if not overridden):\n\n"; # get aliases from the config file we're going to edit, or all of them if # we're just displaying my $aliases = $self->has_arg('edit') ? 
$self->app_handle->config->aliases( $self->config_filename ) : $self->app_handle->config->aliases; if ( %$aliases ) { for my $key ( keys %$aliases ) { $content .= "$key = $aliases->{$key}\n"; } } elsif ( !$self->has_arg('edit') ) { $content = "No aliases for the current repository.\n"; } return $content; } sub parse_template { my $self = shift; my $template = shift; my %parsed; for my $line ( split( /\n/, $template ) ) { if ( $line =~ /^\s*([^#].*?)\s*=\s*(.+?)\s*$/ ) { $parsed{$1} = $2; } } return \%parsed; } sub process_template { my $self = shift; my %args = validate( @_, { template => 1, edited => 1, record => 0 } ); my $updated = $args{edited}; my ($config) = $self->parse_template($updated); my $aliases = $self->app_handle->config->aliases( $self->config_filename ); my $c = $self->app_handle->config; my @added = grep { !$aliases->{$_} } sort keys %$config; my @changed = grep { $config->{$_} && $aliases->{$_} ne $config->{$_} } sort keys %$aliases; my @deleted = grep { !$config->{$_} } sort keys %$aliases; # attempt to set all added/changed/deleted aliases at once my @to_set = ( (map { { key => "alias.'$_'", value => $config->{$_} } } (@added, @changed)), (map { { key => "alias.'$_'" } } @deleted), ); eval { $c->group_set( $self->config_filename, \@to_set, ); }; # if we fail, prompt the user to re-edit # # one of the few ways to trigger this is to try to create an alias # that starts with a [ character if ($@) { chomp $@; my $error = "# Error: '$@'"; $self->handle_template_errors( rtype => 'aliases', template_ref => $args{template}, bad_template => $args{edited}, errors_pattern => '', error => $error, old_errors => $self->old_errors, ); $self->old_errors($error); return 0; } # otherwise, print out what changed and return happily else { for my $add ( @added ) { print 'Added alias ' . 
"'$add' = '$config->{$add}'\n"; } for my $change (@changed) { print "Changed alias '$change' from '$aliases->{$change}'" ."to '$config->{$change}'\n"; } for my $delete ( @deleted ) { print "Deleted alias '$delete'\n"; } return 1; } } # override the messages from Config module with messages w/better context for # Aliases override delete_usage_msg => sub { my $self = shift; my $app_cmd = $self->cli->get_script_name; my $cmd = shift; qq{usage: ${app_cmd}${cmd} "alias text"\n}; }; override add_usage_msg => sub { my $self = shift; my $app_cmd = $self->cli->get_script_name; my ($cmd, $subcmd) = @_; qq{usage: ${app_cmd}$cmd $subcmd "alias text" "cmd to translate to"\n}; }; __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Clone.pm000066400000000000000000000077211160607302300207650ustar00rootroot00000000000000package Prophet::CLI::Command::Clone; use Any::Moose; extends 'Prophet::CLI::Command::Merge'; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}clone --from [--as ] | --local END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); if ($self->has_arg('local')) { $self->list_bonjour_sources; return; } $self->validate_args(); $self->set_arg( 'to' => $self->app_handle->handle->url() ); $self->target( Prophet::Replica->get_handle( url => $self->arg('to'), app_handle => $self->app_handle, )); if ( $self->target->replica_exists ) { die "The target replica already exists.\n"; } if ( !$self->target->can_initialize ) { die "The target replica path you specified can't be created.\n"; } $self->source( Prophet::Replica->get_handle( url => $self->arg('from'), app_handle => $self->app_handle, )); my %init_args; if ( $self->source->isa('Prophet::ForeignReplica') ) { $self->target->after_initialize( sub { shift->app_handle->set_db_defaults } ); } else { %init_args = ( db_uuid => $self->source->db_uuid, resdb_uuid => $self->source->resolution_db_handle->db_uuid, ); } unless ($self->source->replica_exists) { die "The source replica '@{[$self->source->url]}' doesn't exist or is unreadable.\n"; } $self->target->initialize(%init_args); # create new config section for this replica my $from = $self->arg('from'); my $alias = $self->arg('as'); my $base_key = $alias ? 'replica.'.$alias : 'replica.'.$from; $self->app_handle->config->group_set( $self->app_handle->config->replica_config_file, [ { key => $base_key.'.url', value => $self->arg('from'), }, { key => $base_key.'.uuid', value => $self->target->uuid, }, ] ); if ( $self->source->can('database_settings') ) { my $remote_db_settings = $self->source->database_settings; my $default_settings = $self->app_handle->database_settings; for my $name ( keys %$remote_db_settings ) { my $uuid = $default_settings->{$name}[0]; die $name unless $uuid; my $s = $self->app_handle->setting( uuid => $uuid ); $s->set( $remote_db_settings->{$name} ); } } $self->SUPER::run(); } sub validate_args { my $self = shift; unless ( $self->has_arg('from') ) { warn "No --from specified!\n"; die $self->print_usage; } } # When we clone from another replica, we ALWAYS want to take their way forward, # even when there's an insane, impossible conflict # sub merge_resolver { 'Prophet::Resolver::AlwaysTarget'} =head2 list_bonjour_sources Probes the local network for bonjour replicas if the local arg is specified. Prints a list of all sources found. 
=cut sub list_bonjour_sources { my $self = shift; my @bonjour_sources; Prophet::App->try_to_require('Net::Bonjour'); if ( Prophet::App->already_required('Net::Bonjour') ) { print "Probing for local sources with Bonjour\n\n"; my $res = Net::Bonjour->new('prophet'); $res->discover; my $count = 0; for my $entry ( $res->entries ) { require URI; my $uri = URI->new(); $uri->scheme( 'http' ); $uri->host($entry->hostname); $uri->port( $entry->port ); $uri->path('replica/'); print ' * '.$uri->canonical.' - '.$entry->name."\n"; $count++; } if ($count) { print "\nFound $count source".($count==1? '' : 's')."\n"; } else { print "No local sources found.\n"; } } return; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Config.pm000066400000000000000000000243051160607302300211270ustar00rootroot00000000000000package Prophet::CLI::Command::Config; use Any::Moose; use Params::Validate qw/validate/; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::TextEditorCommand'; has config_filename => ( is => 'rw', isa => 'Str', lazy => 1, default => sub { $_[0]->app_handle->config->replica_config_file; }, ); has old_errors => ( is => 'rw', isa => 'Str', default => '', ); sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), a => 'add', d => 'delete', s => 'show' }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}config [show] ${cmd}config edit [--global|--user] ${cmd}config [] END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); my $config = $self->config; if ($self->has_arg('global')) { $self->config_filename($config->global_file); } elsif ($self->has_arg('user')) { $self->config_filename($config->user_file); } # add is the same as set if ( $self->context->has_arg('add') && !$self->has_arg('set') ) { $self->context->set_arg('set', $self->arg('add') ) } if ( $self->has_arg('set') || $self->has_arg('delete') ) { if ( $self->has_arg('set') ) { my $value = $self->arg('set'); if ( $value =~ /^\s*(.+?)\s*=\s*(.+?)\s*$/ ) { my ($key, $value) = ($1, $2); $self->_warn_unknown_args( $key, $value ); $config->set( key => $key, value => $value, filename => $self->config_filename, ); } # no value given, just print the current value else { $self->_warn_unknown_args( $self->arg('set') ); my $value = $config->get( key => $self->arg('set') ); if ( defined $value ) { print $config->get( key => $self->arg('set') ) . "\n"; } else { print "Key " . $self->arg('set') . " is not set.\n"; } } } elsif ( $self->has_arg('delete') ) { my $key = $self->arg('delete'); $self->_warn_unknown_args( $key ); $config->set( key => $key, filename => $self->config_filename, ); } } elsif ( $self->has_arg('edit') ) { my $done = 0; die "You don't have write permissions on " .$self->config_filename.", can't edit!\n" if (-e $self->config_filename && ! -w $self->config_filename) || ! 
-w (File::Spec->splitpath($self->config_filename))[1]; my $template = $self->make_template; while ( !$done ) { $done = $self->try_to_edit( template => \$template ); $config->load; } } else { # if no args are given, print out the contents of the currently loaded # config files print "Configuration:\n\n"; my @files =@{$config->config_files}; if (!scalar @files) { print $self->no_config_files; return; } print "Config files:\n\n"; for my $file (@files) { print "$file\n"; } print "\nYour configuration:\n\n"; $config->dump; } } sub _warn_unknown_args { my $self = shift; my $key = shift; my $value = shift; # help users avoid frustration if they accidentally do something # like config add aliases.foo = push --to foo@bar.com my %args = %{$self->args}; for my $arg ( qw(show edit add delete set user global) ) { delete $args{$arg}; } if ( keys %args != 0 ) { my $args_str = join(q{ }, keys %args); print "W: You have args set that aren't used by this command! Quote your\n" . "W: key/value if this was accidental.\n" . "W: - offending args: ${args_str}\n" . "W: - running command with key '$key'"; print ", value '$value'" if defined $value; print "\n"; } } sub make_template { my $self = shift; return -f $self->config_filename ? Prophet::Util->slurp( $self->config_filename ) : ''; } sub process_template { my $self = shift; my %args = validate( @_, { template => 1, edited => 1, record => 0 } ); # Attempt parsing the config. If we're good, remove any previous error # sections, write to disk and load. eval { $self->config->parse_content( content => $args{edited}, error => sub { Config::GitLike::error_callback( @_, filename => $self->config_filename ); }, ); }; if ($@) { chomp $@; my @error_lines = split "\n", $@; my $error = join "\n", map { "# Error: '$_'" } @error_lines; $self->handle_template_errors( rtype => 'configuration', template_ref => $args{template}, bad_template => $args{edited}, errors_pattern => '', error => $error, old_errors => $self->old_errors, ); return 0; } my $old_errors = $self->old_errors; Prophet::Util->write_file( file => $self->config_filename, content => $args{edited}, ); return 1; } sub no_config_files { my $self = shift; return "No configuration files found. " . " Either create a file called '".$self->handle->app_handle->config->replica_config_file. "' or set the PROPHET_APP_CONFIG environment variable.\n\n"; } sub parse_cli_arg { my $self = shift; my ($cmd, $arg) = @_; use Text::ParseWords qw(shellwords); my @args = shellwords($arg); if ( $args[0] eq 'show' ) { $self->context->set_arg(show => 1); } elsif ( $args[0] eq 'edit' ) { $self->context->set_arg(edit => 1); } elsif ( $args[0] eq 'delete' ) { $self->_setup_delete_subcmd( "$cmd delete", @args[1..$#args] ); } # all of these may also contain add|set after alias # prophet alias "foo bar" = "foo baz" (1) # prophet alias foo = bar (1) # prophet alias foo =bar (2) # prophet alias foo bar = bar baz (1) # prophet alias foo bar = "bar baz" (1) elsif ( $args[0] =~ /^(add|set)$/ || (@args >= 3 && grep { m/^=|=$/ } @args) # ex 1 || (@args == 2 && $args[1] =~ /^=|=$/) ) { # ex 2 my $subcmd = defined $1 ? $1 : q{}; shift @args if $args[0] =~ /^(?:add|set)$/; $self->_setup_old_syntax_add_subcmd( $cmd, $subcmd, @args ); } # alternate syntax (preferred): # prophet alias "foo bar" "bar baz", prophet alias foo "bar baz", # prophet alias foo bar, etc. 
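    # e.g. (hypothetical): prophet alias "foo bar" "push --to foo@bar.com"
    #   creates the alias "foo bar" = "push --to foo@bar.com"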
# (can still have add|set at the beginning) else { my $subcmd = q{}; if ( $args[0] =~ /^(add|set)$/ ) { shift @args; $subcmd = $1; } $self->_setup_new_syntax_add_subcmd( $cmd, $subcmd, @args ); } } sub _setup_delete_subcmd { my $self = shift; my $cmd = shift; my @args = @_; if ( @args ) { my $remainder = join(q{ }, @args); $self->context->set_arg(delete => $remainder); } else { if ( $cmd =~ /delete/ ) { $self->print_usage( usage_method => sub { $self->delete_usage_msg( $cmd ); }, ); } else { $self->print_usage; } } } sub _setup_old_syntax_add_subcmd { my $self = shift; my $cmd = shift; my $subcmd = shift; my @args = @_; if ( @args > 1 ) { # divide words up into two groups split on = my (@orig_words, @new_words); my $seen_equals = 0; for my $word (@args) { if ( $seen_equals ) { push @new_words, $word; } else { if ( $word =~ s/=$// ) { $seen_equals = 1; # allows syntax like alias add foo bar= bar baz push @orig_words, $word if $word; next; } elsif ( $word =~ s/^=// ) { $seen_equals = 1; # allows syntax like alias add foo bar =bar baz push @new_words, $word if $word; next; } push @orig_words, $word; } } # join each group together to get what we're setting my $orig = join( q{ }, @orig_words ); my $new = join( q{ }, @new_words ); $orig = "'$orig'" if $cmd =~ /^alias/ && $orig =~ /\./; $self->context->set_arg(set => "$orig=$new"); } # all of these may also contain add|set after alias # prophet alias "foo = bar" # prophet alias "foo bar = foo baz" elsif ( defined $args[0] && $args[0] =~ /=/ ) { $self->context->set_arg(set => $args[0]); } else { # the only way this will be triggered is if the user types # "config add" or "config set" $self->print_usage( usage_method => sub { $self->add_usage_msg($cmd, $subcmd); }, ); } } sub _setup_new_syntax_add_subcmd { my $self = shift; my $cmd = shift; my $subcmd = shift; my @args = @_; if ( @args <= 2 ) { my ($orig, $new) = ($args[0], $args[1]); $orig = "'$orig'" if $cmd =~ /alias/ && $orig =~ /\./; if ( $new ) { $self->context->set_arg(set => "$orig=$new"); } else { $self->context->set_arg(set => $orig); } } else { $self->print_usage( usage_method => sub { $self->add_usage_msg($cmd, $subcmd); }, ); } } sub delete_usage_msg { my $self = shift; my $app_cmd = $self->cli->get_script_name; my $cmd = shift; qq{usage: ${app_cmd}${cmd} section.subsection.var\n}; } sub add_usage_msg { my $self = shift; my $app_cmd = $self->cli->get_script_name; my ($cmd, $subcmd) = @_; qq{usage: ${app_cmd}${cmd} ${subcmd} section.subsection.var ["key value"]\n}; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Create.pm000066400000000000000000000020461160607302300211230ustar00rootroot00000000000000package Prophet::CLI::Command::Create; use Any::Moose; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::RecordCommand'; has '+uuid' => ( required => 0); has record => ( is => 'rw', isa => 'Prophet::Record', documentation => 'The record object of the created record.', ); sub usage_msg { my $self = shift; my ($cmd, $type_and_subcmd) = $self->get_cmd_and_subcmd_names; return <<"END_USAGE"; usage: ${cmd}${type_and_subcmd} -- prop1=foo prop2=bar END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); my $record = $self->_get_record_object; my ($val, $msg) = $record->create( props => $self->edit_props ); if (!$val) { warn "Unable to create record: " . $msg . "\n"; } if (!$record->uuid) { warn "Failed to create " . $record->record_type . "\n"; return; } $self->record($record); print "Created " . $record->record_type . " " . 
$record->luid . " (".$record->uuid.")"."\n"; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Delete.pm000066400000000000000000000013011160607302300211130ustar00rootroot00000000000000package Prophet::CLI::Command::Delete; use Any::Moose; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::RecordCommand'; sub usage_msg { my $self = shift; my ($cmd, $type_and_subcmd) = $self->get_cmd_and_subcmd_names; return <<"END_USAGE"; usage: ${cmd}${type_and_subcmd} END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); $self->require_uuid; my $record = $self->_load_record; if ( $record->delete ) { print $record->type . " " . $record->uuid . " deleted.\n"; } else { print $record->type . " " . $record->uuid . "could not be deleted.\n"; } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Export.pm000066400000000000000000000020111160607302300211710ustar00rootroot00000000000000package Prophet::CLI::Command::Export; use Any::Moose; extends 'Prophet::CLI::Command'; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}export --path [--format feed] END_USAGE } sub run { my $self = shift; my $class; $self->print_usage if $self->has_arg('h'); unless ($self->context->has_arg('path')) { warn "No --path argument specified!\n"; $self->print_usage; } if ($self->context->has_arg('format') && ($self->context->arg('format') eq 'feed') ){ $class = 'Prophet::ReplicaFeedExporter'; } else { $class = 'Prophet::ReplicaExporter'; } $self->app_handle->require ($class); my $exporter = $class->new( { target_path => $self->context->arg('path'), source_replica => $self->app_handle->handle, app_handle => $self->app_handle } ); $exporter->export(); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/History.pm000066400000000000000000000010521160607302300213550ustar00rootroot00000000000000package Prophet::CLI::Command::History; use Any::Moose; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::RecordCommand'; sub usage_msg { my $self = shift; my ($cmd, $type_and_subcmd) = $self->get_cmd_and_subcmd_names; return <<"END_USAGE"; usage: ${cmd}${type_and_subcmd} END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); $self->require_uuid; my $record = $self->_load_record; print $record->history_as_string; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Info.pm000066400000000000000000000026611160607302300206160ustar00rootroot00000000000000package Prophet::CLI::Command::Info; use Any::Moose; extends 'Prophet::CLI::Command'; sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), l => 'local' }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}info END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); print "Records Database\n"; print "----------------\n"; print "Location: ".$self->handle->url." (@{[ref($self->handle)]})\n"; print "Database UUID: ".$self->handle->db_uuid."\n"; print "Replica UUID: ".$self->handle->uuid."\n"; print "Changesets: ".$self->handle->latest_sequence_no."\n"; print "Known types: ".join(',', @{$self->handle->list_types} )."\n\n"; print "Resolutions Database\n"; print "--------------------\n"; print "Location: " .$self->handle->resolution_db_handle->url." 
(@{[ref($self->handle)]})\n"; print "Database UUID: " .$self->handle->resolution_db_handle->db_uuid."\n"; print "Replica UUID: " .$self->handle->resolution_db_handle->uuid."\n"; print "Changesets: " .$self->handle->resolution_db_handle->latest_sequence_no."\n"; # known types get very unwieldy for resolutions # print "Known types: " # .join(',', @{$self->handle->resolution_db_handle->list_types} )."\n"; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Init.pm000066400000000000000000000017121160607302300206220ustar00rootroot00000000000000package Prophet::CLI::Command::Init; use Any::Moose; extends 'Prophet::CLI::Command'; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}init END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); if ($self->app_handle->handle->replica_exists) { die "Your Prophet database already exists.\n"; } $self->app_handle->handle->after_initialize( sub { shift->app_handle->set_db_defaults } ); $self->app_handle->handle->initialize; print "Initialized your new Prophet database.\n"; # create new config section for this replica my $url = $self->app_handle->handle->url; $self->app_handle->config->set( key => 'replica.'.$url.'.uuid', value => $self->app_handle->handle->uuid, filename => $self->app_handle->config->replica_config_file, ); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Log.pm000066400000000000000000000100501160607302300204330ustar00rootroot00000000000000package Prophet::CLI::Command::Log; use Any::Moose; extends 'Prophet::CLI::Command'; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}log --all Show all entries ${cmd}log 0..LATEST~5 Show first entry up until the latest ${cmd}log LATEST~10 Show last ten entries ${cmd}log LATEST Show last entry END_USAGE } # Default: last 20 entries. # prophet log --all # show it all (overrides everything else) # prophet log --range 0..LATEST~5 # shows the first until 5 from the latest # prophet log --range LATEST~10 # shows last 10 entries # prophet log --range LATEST # shows the latest entry # syntactic sugar in dispatcher: # prophet log 0..LATEST~5 => prophet log --range 0..LATEST~5 # prophet log LATEST~10 => prophet log --range LATEST~10 sub run { my $self = shift; my $handle = $self->handle; $self->print_usage if $self->has_arg('h'); # --all overrides any other args if ($self->has_arg('all')) { $self->set_arg('range', '0..'.$handle->latest_sequence_no); } my ($start, $end) = $self->has_arg('range') ? $self->parse_range_arg() : ($handle->latest_sequence_no - 20, $handle->latest_sequence_no); # parse_range returned undef die "Invalid range specified.\n" if !defined($start) || !defined($end); $start = 0 if $start < 0; die "START must be before END in START..END.\n" if $end - $start < 0; $handle->traverse_changesets( reverse => 1, after => $start - 1, until => $end, callback => sub { my %args = (@_); $self->handle_changeset($args{changeset}); }, ); } =head2 parse_range_arg Parses the string in the 'range' arg into start and end sequence numbers and returns them in that order. Returns undef if the string is malformed. =cut sub parse_range_arg { my $self = shift; my $range = $self->arg('range'); # split on .. 
(denotes range) my @start_and_end = split(/\.\./, $range, 2); my ($start, $end); if (@start_and_end == 1) { # only one delimiter was specified -- this will be the # START; END defaults to the latest $end = $self->handle->latest_sequence_no; $start = $self->_parse_delimiter($start_and_end[0]); } elsif (@start_and_end == 2) { # both delimiters were specified # parse the first one as START $start = $self->_parse_delimiter($start_and_end[0]); # parse the second one as END $end = $self->_parse_delimiter($start_and_end[1]); } else { # something wrong was specified return undef; } return ($start, $end); } =head2 _parse_delimiter($delim) Takes a delimiter string and parses into a sequence number. If it is not either an integer number or of the form LATEST~#, returns undef (invalid delimiter). =cut sub _parse_delimiter { my ($self, $delim) = @_; if ($delim =~ m/^\d+$/) { # a sequence number was specified, just use it return $delim; } else { # try to parse what was given as LATEST~# # if it's just LATEST, we want only the last change my $offset; $offset = 0 if $delim eq 'LATEST'; (undef, $offset) = split(/~/, $delim, 2) if $delim =~ m/^LATEST~/; return undef unless defined $offset && $offset =~ m/^\d+$/; return $self->handle->latest_sequence_no - $offset; } return undef; } sub handle_changeset { my $self = shift; my $changeset = shift; print $changeset->as_string( change_header => sub { my $change = shift; $self->change_header($change); } ); } sub change_header { my $self = shift; my $change = shift; return " # " . $change->record_type . " " . $self->app_handle->handle->find_or_create_luid( uuid => $change->record_uuid ) . " (" . $change->record_uuid . ")\n"; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Merge.pm000066400000000000000000000140511160607302300207560ustar00rootroot00000000000000package Prophet::CLI::Command::Merge; use Any::Moose; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::ProgressBar'; with 'Prophet::CLI::MirrorCommand'; has source => ( isa => 'Prophet::Replica', is => 'rw' ); has target => ( isa => 'Prophet::Replica', is => 'rw' ); sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), f => 'force' , n => 'dry-run', }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}merge --from --to [options] Options are: -v|--verbose Be verbose -f|--force Do merge even if replica UUIDs differ -n|--dry-run Don't actually import changesets END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); Prophet::CLI->end_pager(); $self->source( Prophet::Replica->get_handle( url => $self->arg('from'), app_handle => $self->app_handle, )) unless $self->source; # subclass may already have set source $self->target( Prophet::Replica->get_handle( url => $self->arg('to'), app_handle => $self->app_handle, )) unless $self->target; # subclass may already have set target $self->validate_merge_replicas($self->source => $self->target); if ( $self->source->can('read_changeset_index') && $self->target->url eq $self->app_handle->handle->url) { # my $original_source = $self->source; # $self->source($self->get_cache_for_source($original_source)); # $self->sync_cache_from_source( target=> $self->source, source => $original_source); } # foreign replicas don't typically have a resdb handle, since they aren't # native $self->target->import_resolutions_from_remote_replica( from => $self->source, force => $self->has_arg('force'), resolver_class => 'Prophet::Resolver::Prompt', ) if 
($self->source->resolution_db_handle); my $changesets = $self->_do_merge(); #Prophet::CLI->start_pager(); $self->print_report($changesets); } sub print_report { my $self = shift; my $changesets = shift; if ( $self->has_arg('verbose') ) { if ( $changesets == 0 ) { print "No new changesets.\n"; } elsif ( $changesets == 1 ) { print "Merged one changeset.\n"; } else { print "Merged $changesets changesets.\n"; } } else { print "\nDone.\n"; } } =head2 _do_merge Merges changesets from the source replica into the target replica. Fails fatally if the source and target are the same, or the target is not writable. Conflicts are resolved by either the resolver specified in the C environmental variable, the C argument (can be set to C or C, in which case Prophet will always prefer changesets from one replica or the other), or by using a default resolver. Returns the number of changesets merged. =cut sub _do_merge { my ($self) = @_; my $last_seen_from_source = $self->target->last_changeset_from_source( $self->source->uuid ); my %import_args = ( from => $self->source, resdb => $self->app_handle->handle->resolution_db_handle, resolver_class => $self->merge_resolver(), force => $self->has_arg('force'), ); my $changesets = 0; if ( $self->has_arg('dry-run') ) { $self->source->traverse_changesets( after => $last_seen_from_source, before_load_changeset_callback => sub { my %args = (@_); my $data = $args{changeset_metadata}; my ($seq, $orig_uuid, $orig_seq, $key) = @$data; # skip changesets we've seen before if ( $self->target->has_seen_changeset( source_uuid => $orig_uuid, sequence_no => $orig_seq) ){ return undef; } else { return 1; } }, callback => sub { my %args = (@_); if ( $self->target->should_accept_changeset( $args{changeset} ) ) { print $args{changeset}->as_string; } } ); } else { my $source_latest = $self->source->latest_sequence_no() || 0; if ( $self->has_arg('verbose') ) { print "Integrating changes from " . $last_seen_from_source . " to " . $source_latest . "\n"; $import_args{reporting_callback} = sub { my %args = @_; print $args{changeset}->as_string; $changesets++; }; } else { $import_args{reporting_callback} = $self->progress_bar( max => ( $source_latest - $last_seen_from_source ), format => "%30b %p %E\r" ); } $self->target->import_changesets(%import_args); return $changesets; } } sub validate_merge_replicas { my $self = shift; my $source = shift; my $target = shift; if ( ! $target->replica_exists ) { $self->handle->log_fatal("The target (".$self->arg('to').") replica doesn't exist"); } if ( ! $source->replica_exists ) { $self->handle->log_fatal("The source (".$self->arg('from').") replica doesn't exist"); } if ( $target->uuid eq $source->uuid ) { $self->handle->log_fatal( "You appear to be trying to merge two identical replicas. Skipping."); } if ( !$target->can_write_changesets ) { $self->handle->log_fatal( $target->url . " does not accept changesets. Perhaps it's unwritable."); } return 1; } sub merge_resolver { my $self = shift; my $prefer = $self->arg('prefer') || 'none'; my $resolver = $ENV{'PROPHET_RESOLVER'} ? 'Prophet::Resolver::' . $ENV{'PROPHET_RESOLVER'} : $prefer =~ /^(?:to|target)$/ ? 'Prophet::Resolver::AlwaysTarget' : $prefer =~ /^(?:from|source)$/ ? 
'Prophet::Resolver::AlwaysSource' : (); return $resolver; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Mirror.pm000066400000000000000000000023731160607302300211750ustar00rootroot00000000000000package Prophet::CLI::Command::Mirror; use Any::Moose; use Params::Validate qw/:all/; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::MirrorCommand'; has source => ( isa => 'Prophet::Replica', is => 'rw'); has target => ( isa => 'Prophet::Replica', is => 'rw'); sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), f => 'force' }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}mirror --from END_USAGE } sub run { my $self = shift; Prophet::CLI->end_pager(); $self->print_usage if $self->has_arg('h'); $self->validate_args(); my $source = Prophet::Replica->get_handle( url => $self->arg('from'), app_handle => $self->app_handle,); unless ( $source->replica_exists ) { print "The source replica '@{[$source->url]}' doesn't exist or is unreadable."; exit 1; } my $target = $self->get_cache_for_source($source); $self->sync_cache_from_source( target=> $target, source => $source); print "\nDone.\n"; } sub validate_args { my $self = shift; unless ( $self->has_arg('from') ) { warn "No --from specified!\n"; $self->print_usage; } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Publish.pm000066400000000000000000000103111160607302300213200ustar00rootroot00000000000000package Prophet::CLI::Command::Publish; use Any::Moose; extends 'Prophet::CLI::Command::Export'; with 'Prophet::CLI::PublishCommand'; with 'Prophet::CLI::CollectionCommand'; use File::Path; use File::Spec; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}publish --to [--html] [--replica] END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); unless ($self->has_arg('to')) { warn "No --to specified!\n"; $self->print_usage; } # substitute publish-url config variable for to arg if possible my %previous_sources_by_name = $self->app_handle->config->sources( variable => 'publish-url' ); my %shell_by_name = $self->app_handle->config->sources( variable => 'publish-shell' ); my $to = exists $previous_sources_by_name{$self->arg('to')} ? 
$previous_sources_by_name{$self->arg('to')} : $self->arg('to'); # set the temp directory where we will do all of our work, which will be # published via rsync $self->set_arg(path => $self->tempdir); my $export_html = $self->has_arg('html'); my $export_replica = $self->has_arg('replica'); # if the user specifies nothing, then publish the replica $export_replica = 1 if !$export_html; Prophet::CLI->end_pager(); # if we have the html argument, populate the tempdir with rendered templates if ($export_html) { print "Exporting a static HTML version of this replica\n"; $self->export_html() } # otherwise, do the normal prophet export this replica if ($export_replica) { print "Exporting a clone of this replica\n"; $self->SUPER::run(@_) } my $from = $self->arg('path'); print "Publishing the exported clone of the replica to $to with rsync\n"; $self->publish_dir( from => $from, to => $to, shell => $shell_by_name{$self->arg('to')}, ); print "Publication complete.\n"; # create new config section for where to publish this replica # if we're using a url rather than a name $self->record_replica_in_config($to, $self->handle->uuid, 'publish-url') if $to eq $self->arg('to'); } sub export_html { my $self = shift; my $path = $self->arg('path'); # if they specify both html and replica, then stick rendered templates # into a subdirectory. if they specify only html, assume they really # want to publish directly into the specified directory if ($self->has_arg('replica')){ $path = File::Spec->catdir($path => 'html'); mkpath([$path]); } $self->render_templates_into($path); } # helper methods for rendering templates sub render_templates_into { my $self = shift; my $dir = shift; require Prophet::Server; my $server_class = ref($self->app_handle) . "::Server"; if (!$self->app_handle->try_to_require($server_class)) { $server_class = "Prophet::Server"; } my $server = $server_class->new( app_handle => $self->app_handle ); $server->setup_template_roots(); # allow user to specify a specific type to render my @types = $self->type || $self->types_to_render; for my $type (@types) { my $subdir = File::Spec->catdir($dir, $type); mkpath([$subdir]); my $records = $self->get_collection_object(type => $type); $records->matching(sub { 1 }); open (my $fh, '>',File::Spec->catdir($subdir => 'index.html')); print { $fh } $server->render_template('record_table' => $records); close $fh; for my $record ($records->items) { open (my $fh, '>',File::Spec->catdir($subdir => $record->uuid.'.html')); print { $fh } $server->render_template('record' => $record); } } } sub should_skip_type { my $self = shift; my $type = shift; # should we skip all _private types? 
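    # merge-ticket records only track which changesets have been seen
    # from other replicas (sync bookkeeping, not user data), so they
    # are never rendered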
return 1 if $type eq $Prophet::Replica::MERGETICKET_METATYPE; return 0; } sub types_to_render { my $self = shift; return grep { !$self->should_skip_type($_) } @{ $self->handle->list_types }; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Pull.pm000066400000000000000000000074251160607302300206420ustar00rootroot00000000000000package Prophet::CLI::Command::Pull; use Any::Moose; extends 'Prophet::CLI::Command::Merge'; sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), l => 'local' }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}pull --from ${cmd}pull --all ${cmd}pull --local END_USAGE } sub run { my $self = shift; my @from; $self->print_usage if $self->has_arg('h'); Prophet::CLI->end_pager(); # prefer replica.name.pull-url if it exists, otherwise use # replica.name.url my %previous_sources_by_name_pull_url = $self->app_handle->config->sources( variable => 'pull-url' ); my %previous_sources_by_name_url = $self->app_handle->config->sources; my $explicit_from = ''; if ($self->has_arg('from')) { # substitute friendly name -> replica url if we can my $url_from_name = exists $previous_sources_by_name_pull_url{$self->arg('from')} ? $previous_sources_by_name_pull_url{$self->arg('from')} : exists $previous_sources_by_name_url{$self->arg('from')} ? $previous_sources_by_name_url{$self->arg('from')} : $self->arg('from'); $explicit_from = $url_from_name; push @from, $explicit_from; } elsif ($self->has_arg('all')){ # if a source exists in both hashes, the pull-url version will # override the url version my %sources = (%previous_sources_by_name_url, %previous_sources_by_name_pull_url); for my $url (values %sources) { push @from, $url; } } $self->validate_args; $self->set_arg( to => $self->handle->url ); for my $from (grep { defined } ( @from, $self->find_bonjour_sources )) { print "Pulling from $from\n"; #if ( $self->has_arg('all') || $self->has_arg('local') ); $self->set_arg( from => $from ); $self->SUPER::run(); if ($self->source->uuid and ($from eq $explicit_from)) { $self->record_replica_in_config($explicit_from, $self->source->uuid); } print "\n"; } } sub validate_args { my $self = shift; unless ( $self->has_arg('from') || $self->has_arg('local') || $self->has_arg('all') ) { warn "No --from, --local, or --all specified!\n"; $self->print_usage; } } =head2 find_bonjour_sources Probes the local network for bonjour replicas if the local arg is specified. Returns a list of found replica URIs. =cut sub find_bonjour_sources { my $self = shift; my @bonjour_sources; # We can't pull from bonjour sources if we don't have a db yet return undef unless $self->app_handle->handle->replica_exists; my $db_uuid = $self->arg('db_uuid') || $self->app_handle->handle->db_uuid; if ( $self->has_arg('local') ) { Prophet::App->try_to_require('Net::Bonjour'); if ( Prophet::App->already_required('Net::Bonjour') ) { print "Probing for local database replicas with Bonjour\n"; my $res = Net::Bonjour->new('prophet'); $res->discover; for my $entry ( $res->entries ) { my $name = $entry->name; if ( $name eq $db_uuid || $name =~ m/[(]$db_uuid[)]$/ ) { print "Found a database replica on " . 
$entry->hostname."\n"; require URI; my $uri = URI->new(); $uri->scheme( 'http' ); $uri->host($entry->hostname); $uri->port( $entry->port ); $uri->path('replica/'); push @bonjour_sources, $uri->canonical.""; #scalarize } } } } return @bonjour_sources; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Push.pm000066400000000000000000000051261160607302300206410ustar00rootroot00000000000000package Prophet::CLI::Command::Push; use Any::Moose; extends 'Prophet::CLI::Command::Merge'; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}push --to [--force] END_USAGE } sub run { my $self = shift; Prophet::CLI->end_pager(); $self->print_usage if $self->has_arg('h'); $self->validate_args; # sub out friendly names for replica URLs if possible my %previous_sources_by_name_push_url = $self->app_handle->config->sources( variable => 'push-url' ); my %previous_sources_by_name_url = $self->app_handle->config->sources; my $original_to = $self->arg('to'); $self->set_arg( 'to' => exists $previous_sources_by_name_push_url{$self->arg('to')} ? $previous_sources_by_name_push_url{$self->arg('to')} : exists $previous_sources_by_name_url{$self->arg('to')} ? $previous_sources_by_name_url{$self->arg('to')} : $self->arg('to') ); # don't let users push to foreign replicas they haven't pulled from yet # without --force my %seen_replicas_by_url = $self->config->sources( by_variable => 1 ); my %seen_replicas_by_pull_url = $self->config->sources( by_variable => 1, variable => 'pull-url', ); (my $class, undef, undef) = Prophet::Replica->_url_to_replica_class( url => $self->arg('to'), app_handle => $self->app_handle, ); die "No replica found at '".$self->arg('to')."'.\n" unless $class; die "Can't push to HTTP replicas! You probably want to publish instead.\n" if $class->isa("Prophet::Replica::http"); die "Can't push to foreign replica that's never been pulled from! 
(Override with --force.)\n" unless $class->isa('Prophet::ForeignReplica') && ( $self->has_arg('force') || ( exists $seen_replicas_by_url{$self->arg('to')} || exists $seen_replicas_by_pull_url{$self->arg('to')} )); # prepare to run merge command (superclass) $self->set_arg( from => $self->handle->url ); $self->set_arg( db_uuid => $self->handle->db_uuid ); $self->SUPER::run(); # we want to record only the replica we're pushing TO, and only if we # weren't using a friendly name already $self->record_replica_in_config($self->arg('to'), $self->target->uuid) if $self->arg('to') eq $original_to; } sub validate_args { my $self = shift; unless ( $self->context->has_arg('to') ) { warn "No --to specified!\n"; $self->print_usage; } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Search.pm000066400000000000000000000130151160607302300211230ustar00rootroot00000000000000package Prophet::CLI::Command::Search; use Any::Moose; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::RecordCommand'; with 'Prophet::CLI::CollectionCommand'; has '+uuid' => ( required => 0, ); has 'sort_routine' => ( is => 'rw', isa => 'CodeRef', required => 0, # default subs are executed immediately, hence the weird syntax for coderefs default => sub { sub { my $records = shift; return (sort { $a->luid <=> $b->luid } @$records); } }, documentation => 'A subroutine which takes a arrayref to a list of records and returns them sorted in some way.', ); has group_routine => ( is => 'rw', isa => 'CodeRef', required => 0, default => sub { sub { my $records = shift; return [ { label => '', records => $records } ]; } }, documentation => 'A subroutine which takes an arrayref to a list of records and returns an array of hashrefs { label => $label, records => \@array}' ); sub usage_msg { my $self = shift; my ($cmd, $type_and_subcmd) = $self->get_cmd_and_subcmd_names; return <<"END_USAGE"; usage: ${cmd}${type_and_subcmd} ${cmd}${type_and_subcmd} -- prop1=~foo prop2!~bar|baz END_USAGE } sub default_match { 1 } sub get_search_callback { my $self = shift; my %prop_checks; for my $check ($self->prop_set) { push @{ $prop_checks{ $check->{prop} } }, $check; } my $regex = $self->arg('regex'); return sub { my $item = shift; my $props = $item->get_props; my $did_limit = 0; if ($self->prop_names > 0) { $did_limit = 1; for my $prop (keys %prop_checks) { my $got = $props->{$prop}; my $ok = 0; for my $check (@{ $prop_checks{$prop} }) { $ok = 1 if $self->_compare($check->{value}, $check->{cmp}, $got); } return 0 if !$ok; } } # if they specify a regex, it must match if ($regex) { $did_limit = 1; my $ok = 0; for (values %$props) { if (/$regex/) { $ok = 1; last; } } return 0 if !$ok; } return $self->default_match($item) if !$did_limit; return 1; }; } sub _compare { my $self = shift; my ($expected, $cmp, $got) = @_; $got = '' if !defined($got); # avoid undef warnings if ($cmp eq '=') { return 0 unless $got eq $expected; } elsif ($cmp eq '=~') { return 0 unless $got =~ $expected; } elsif ($cmp eq '!=' || $cmp eq '<>' || $cmp eq 'ne') { return 0 if $got eq $expected; } elsif ($cmp eq '!~') { return 0 if $got =~ $expected; } return 1; } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); my $records = $self->get_collection_object(); my $search_cb = $self->get_search_callback(); $records->matching($search_cb); $self->display_terminal($records); } =head2 display_terminal $records Takes a collection of records, sorts it according to C<$sort_routine>, and then prints it to standard output using 
Lformat_summary> as the format. =cut sub display_terminal { my $self = shift; my $records = shift; my $groups = $self->group_routine->( [$records->items] ); foreach my $group ( @{$groups} ) { $self->out_group_heading( $group, $groups ); $self->out_record($_) for $self->sort_routine->( $group->{records} ); } } =head2 sort_by_prop $prop, $records, $sort_undef_last Given a property name and an arrayref to a list of records, returns a list of the records sorted by their C property, in ascending order. If $sort_undef_last is true, records which don't have a property defined are sorted *after* all other records; otherwise, they are sorted before. =cut sub sort_by_prop { my ($self, $prop, $records, $sort_undef_last) = @_; no warnings 'uninitialized'; # some records might not have this prop return (sort { my $prop_a = $a->prop($prop); my $prop_b = $b->prop($prop); if ( $sort_undef_last && !defined($prop_a) ) { return 1; } elsif ( $sort_undef_last && !defined($prop_b) ) { return -1; } else { return $prop_a cmp $prop_b; } } @{$records}); } =head2 group_by_prop $prop => $records Given a property name and an arrayref to a list of records, returns a reference to a list of hashes of the form: { label => $label, records => \@records } =cut sub group_by_prop { my $self = shift; my $prop = shift; my $records = shift; my $results = {}; for my $record (@$records) { push @{ $results->{ ( $record->prop($prop) || '') } }, $record; } return [ map { { label => $_, records => $results->{$_} } } keys %$results ]; } sub out_group_heading { my $self = shift; my $group = shift; my $groups = shift; # skip headings with no records return unless exists $group->{records}->[0]; return unless @$groups > 1; $group->{label} ||= 'none'; print "\n". $group->{label} ."\n" . ("=" x length $group->{label} ) . "\n\n"; } sub out_record { my $self = shift; my $record = shift; print $record->format_summary . "\n"; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Server.pm000066400000000000000000000023771160607302300211750ustar00rootroot00000000000000package Prophet::CLI::Command::Server; use Any::Moose; extends 'Prophet::CLI::Command'; has server => ( is => 'rw', isa => 'Maybe[Prophet::Server]', default => sub { my $self = shift; return $self->setup_server(); }, lazy => 1, ); sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), p => 'port', w => 'writable' }; use Prophet::Server; sub usage_msg { my $self = shift; my ($cmd, $subcmd) = $self->get_cmd_and_subcmd_names( no_type => 1 ); return <<"END_USAGE"; usage: ${cmd}${subcmd} [--port ] END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); Prophet::CLI->end_pager(); $self->server->run; } sub setup_server { my $self = shift; my $server_class = ref($self->app_handle) . 
"::Server"; if (!$self->app_handle->try_to_require($server_class)) { $server_class = "Prophet::Server"; } my $server; if ( $self->has_arg('port') ) { $server = $server_class->new( app_handle => $self->app_handle, port => $self->arg('port') ); } else { $server = $server_class->new( app_handle => $self->app_handle ); } return $server; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Settings.pm000066400000000000000000000120111160607302300215110ustar00rootroot00000000000000package Prophet::CLI::Command::Settings; use Any::Moose; use Params::Validate qw/validate/; use JSON; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::TextEditorCommand'; sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), s => 'show' }; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}settings [show] ${cmd}settings edit ${cmd}settings set -- setting "new value" Note that setting values must be valid JSON. END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); my $settings = $self->app_handle->database_settings; my $template = $self->make_template; if ( $self->has_arg( 'edit' ) ) { my $done = 0; while ( !$done ) { Prophet::CLI->end_pager(); $done = $self->try_to_edit( template => \$template ); } } elsif ( $self->context->has_arg('set') ) { for my $name ( $self->context->prop_names ) { my $uuid; if ($settings->{$name}) { $uuid = $settings->{$name}->[0]; } else { print "Setting \"$name\" does not exist, skipping.\n"; next; } my $s = $self->app_handle->setting( uuid => $uuid ); my $old_value = $s->get_raw; my $new_value = $self->context->props->{$name}; print "Trying to change " . $name . " from $old_value to $new_value.\n"; if ( $old_value ne $new_value ) { $s->set( from_json( $new_value, { utf8 => 1 } ) ); print " -> Changed.\n"; } else { print " -> No change needed.\n"; } } return; } else { print $template. "\n"; return; } } sub make_template { my $self = shift; my $content = ''; # get all settings records (the defaults, not the # ones in the DB) -- current values from the DB are retrieved in # _make_template_entry) my $settings = $self->app_handle->database_settings; for my $name ( keys %$settings ) { my @metadata = @{ $settings->{$name} }; my $s = $self->app_handle->setting( label => $name, uuid => ( shift @metadata ), default => [@metadata] ); $content .= $self->_make_template_entry($s) . "\n\n"; } return $content; } sub _make_template_entry { my $self = shift; my $setting = shift; # format each settings record as # # # uuid: uuid # key: value, value, value # return "# uuid: " . $setting->uuid . "\n" . $setting->label . ": " # this is what does the actual loading of settings # in the database to override the defaults . 
to_json( $setting->get, { canonical => 1, pretty => 0, utf8 => 1, allow_nonref => 0 } ); } sub parse_template { my $self = shift; my $template = shift; my $uuid = 'NONE'; my %content; my %parsed; for my $line ( split( /\n/, $template ) ) { if ( $line =~ /^\s*\#\s*uuid\:\s*(.*?)\s*$/ ) { $uuid = $1; } else { push @{ $content{$uuid} }, $line; } } for my $uuid ( keys %content ) { my $data = join( "\n", @{ $content{$uuid} } ); if ( $data =~ /^(.*?)\s*:\s*(.*)\s*$/ms ) { my $label = $1; my $content = $2; $parsed{$uuid} = [ $label, $content ]; } } return \%parsed; } sub process_template { my $self = shift; my %args = validate( @_, { template => 1, edited => 1, record => 0 } ); my $updated = $args{edited}; my ($config) = $self->parse_template($updated); no warnings 'uninitialized'; my $settings = $self->app_handle->database_settings; my %settings_by_uuid = map { uc($settings->{$_}->[0]) => $_ } keys %$settings; my $settings_changed = 0; for my $uuid ( keys %$config ) { # the parsed template could conceivably contain nonexistent uuids my $s; if ($settings_by_uuid{uc($uuid)}) { $s = $self->app_handle->setting( uuid => $uuid ); } else { print "Setting with uuid \"$uuid\" does not exist.\n"; next; } my $old_value = $s->get_raw; my $new_value = $config->{$uuid}->[1]; chomp $new_value; if ( $old_value ne $new_value ) { eval { $s->set( from_json( $new_value, { utf8 => 1 } ) ); print "Changed " . $config->{$uuid}->[0] . " from $old_value to $new_value.\n"; $settings_changed++; }; if ($@) { # error parsing the JSON print 'An error occured setting '.$settings_by_uuid{$uuid}." to $new_value: $@"; } } } print "No settings changed.\n" unless $settings_changed; return 1; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Shell.pm000066400000000000000000000065461160607302300210000ustar00rootroot00000000000000#!/usr/bin/env perl package Prophet::CLI::Command::Shell; use Any::Moose; extends 'Prophet::CLI::Command'; use File::Spec; use Prophet::Util; use Text::ParseWords qw(shellwords); use Scalar::Util qw(weaken); has name => ( is => 'ro', isa => 'Str', default => sub { Prophet::Util->updir($0)} ); has term => ( is => 'ro', isa => 'Term::ReadLine::Stub', lazy => 1, handles => [qw/readline addhistory/], default => sub { my $self = shift; my $weakself = $self; weaken($weakself); require Term::ReadLine; my $term = Term::ReadLine->new("Prophet shell"); $term->Attribs->{completion_function} = sub { $weakself->_complete(@_); }; return $term; }, ); our $HIST = $ENV{PROPHET_HISTFILE} || (($ENV{HOME} || (getpwuid($<))[7]) . "/.prophetreplhist"); our $LEN = $ENV{PROPHET_HISTLEN} || 500; sub usage_msg { my $self = shift; my $cmd = $self->cli->get_script_name; return <<"END_USAGE"; usage: ${cmd}\[shell] END_USAGE } sub prompt { my $self = shift; return $self->name . '> '; } sub preamble { return join "\n", "Prophet $Prophet::VERSION", 'Type "help", "about", or "copying" for more information.', } sub read { my $self = shift; Prophet::CLI->end_pager; # in case a previous command died $self->readline($self->prompt); } sub eval { my $self = shift; my $line = shift; eval { local $SIG{__DIE__} = 'DEFAULT'; my @args = shellwords($line); $self->cli->run_one_command(@args); }; warn $@ if $@; } sub _run { my $self = shift; Prophet::CLI->end_pager; local $| = 1; print $self->preamble . 
"\n"; # we don't want to run the pager for the shell $self->cli->interactive_shell(1); while ( defined(my $cmd = $self->read)) { next if $cmd =~ /^\s*$/; last if $cmd =~ /^\s*q(?:uit)?\s*$/i || $cmd =~ /^\s*exit\s*$/i; $self->eval($cmd); } } sub _complete { my ($self, $last_word, $line, $start) = @_; # we can't just use $last_word because we want all the text before the cursor to # matter, not just the current word my $dispatcher = $self->cli->dispatcher_class->new; # We're supposed to return only the completion of $last_word, not replacements # of $line. So for a completion that returns multiple words, this could screw # up and return only its last word. my @matches = map { s/^.* //; $_ } $dispatcher->complete($line); return @matches; } # make the REPL history persistent sub run{ my $self = shift; $self->print_usage if $self->has_arg('h'); $self->_read_repl_history(); $self->_run(@_); $self->_write_repl_history(); }; # we use eval here because only some Term::ReadLine subclasses support # persistent history. it also seems that ->can doesn't work because of AUTOLOAD # trickery. :( sub _read_repl_history { my $self = shift; eval { local $SIG{__DIE__}; $self->term->stifle_history($LEN); $self->term->ReadHistory($HIST) if -f $HIST; }; } sub _write_repl_history { my $self = shift; eval { local $SIG{__DIE__}; $self->term->WriteHistory($HIST) or warn "Unable to write to shell history file $HIST"; }; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Show.pm000066400000000000000000000045271160607302300206460ustar00rootroot00000000000000package Prophet::CLI::Command::Show; use Any::Moose; use Params::Validate; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::RecordCommand'; sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), 'b' => 'batch' }; sub usage_msg { my $self = shift; my ($cmd, $type_and_subcmd) = $self->get_cmd_and_subcmd_names; return <<"END_USAGE"; usage: ${cmd}$type_and_subcmd [--batch] [--verbose] END_USAGE } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); $self->require_uuid; my $record = $self->_load_record; print $self->stringify_props( record => $record, batch => $self->has_arg('batch'), verbose => $self->has_arg('verbose'), ); } =head2 stringify_props Returns a stringified form of the properties suitable for displaying directly to the user. Also includes luid and uuid. =cut sub stringify_props { my $self = shift; my %args = validate( @_, {record => { ISA => 'Prophet::Record'}, batch => 1, verbose => 1}); my $record = $args{'record'}; my $props = $record->get_props; # which props are we going to display? my @show_props; if ($record->can('props_to_show')) { @show_props = $record->props_to_show(\%args); # if they ask for verbosity, then display all the other fields # after the fields that our subclass wants to show if ($args{verbose}) { my %already_shown = map { $_ => 1 } @show_props; push @show_props, grep { !$already_shown{$_} } sort keys %$props; } } else { @show_props = ('id', sort keys %$props); } # kind of ugly but it simplifies the code $props->{id} = $record->luid ." (" . $record->uuid . 
")"; my @fields; for my $field (@show_props) { my $value = $props->{$field}; # don't bother displaying unset fields next if !defined($value); push @fields, [$field, $value]; } return join '', map { my ($field, $value) = @$_; $self->format_prop(@$_); } @fields; } sub format_prop { my $self = shift; my $field = shift; my $value = shift; return "$field: $value\n" } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Command/Update.pm000066400000000000000000000040741160607302300211450ustar00rootroot00000000000000package Prophet::CLI::Command::Update; use Any::Moose; extends 'Prophet::CLI::Command'; with 'Prophet::CLI::RecordCommand'; sub ARG_TRANSLATIONS { shift->SUPER::ARG_TRANSLATIONS(), e => 'edit' }; sub usage_msg { my $self = shift; my ($cmd, $type_and_subcmd) = $self->get_cmd_and_subcmd_names; return <<"END_USAGE"; usage: ${cmd}${type_and_subcmd} --edit ${cmd}${type_and_subcmd} -- prop1="new value" END_USAGE } sub edit_record { my $self = shift; my $record = shift; my $props = $record->get_props; # don't feed in existing values if we're not interactively editing my $defaults = $self->has_arg('edit') ? $props : undef; my @ordering = ( ); # we want props in $record->props_to_show to show up in the editor if --edit # is supplied too if ($record->can('props_to_show') && $self->has_arg('edit')) { @ordering = $record->props_to_show; map { $props->{$_} = '' if !exists($props->{$_}) } @ordering; } return $self->edit_props(arg => 'edit', defaults => $defaults, ordering => \@ordering); } sub run { my $self = shift; $self->print_usage if $self->has_arg('h'); $self->require_uuid; my $record = $self->_load_record; my $new_props = $self->edit_record($record); # filter out props that haven't changed for my $prop (keys %$new_props) { my $old_prop = defined $record->prop($prop) ? $record->prop($prop) : ''; delete $new_props->{$prop} if ($old_prop eq $new_props->{$prop}); } if (keys %$new_props) { my $result = $record->set_props( props => $new_props ); if ($result) { print ucfirst($record->type) . " " . $record->luid . " (".$record->uuid.")"." updated.\n"; } else { print "SOMETHING BAD HAPPENED " . $record->type . " " . $record->luid . " (" . $record->uuid . 
") not updated.\n"; } } else { print "No properties changed.\n"; } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Dispatcher.pm000066400000000000000000000115001160607302300204230ustar00rootroot00000000000000package Prophet::CLI::Dispatcher; use Path::Dispatcher::Declarative -base; use Any::Moose; extends 'Path::Dispatcher::Declarative', any_moose('Object'); use Prophet::CLI::Dispatcher::Rule::RecordId; with 'Prophet::CLI::Parameters'; our $cli; our @PREFIXES = qw(Prophet::CLI::Command); sub add_command_prefix { unshift @PREFIXES, @_ } on '' => sub { my $self = shift; if ($self->context->has_arg('version')) { run_command("Version")->($self) } elsif( $self->context->has_arg('help') ){ run_command("Help")->($self) } else { next_rule } }; # publish foo@bar.com:www/baz => publish --to foo@bar.com:www/baz on qr{^(publish|push) (\S+)$} => sub { my $self = shift; $self->context->set_arg(to => $2); run($1, $self); }; # clone http://fsck.com/~jesse/sd-bugs => clone --from http://fsck.com/~jesse/sd-bugs on qr{^(clone|pull) (\S+)$} => sub { my $self = shift; $self->context->set_arg(from => $2); run($1, $self); }; # log range => log --range range on qr{log\s+([0-9LATEST.~]+)} => sub { my $self = shift; $self->context->set_arg(range => $1); run('log', $self); }; under settings => sub { my $self = shift; on edit => sub { my $self = shift; $self->context->set_arg( 'edit' ); run('settings', $self); }; on show => sub { my $self = shift; $self->context->set_arg( 'show' ); run('settings', $self); }; on set => sub { my $self = shift; $self->context->set_arg( 'set' ); run('settings', $self); }; }; dispatcher->add_rule( Path::Dispatcher::Rule::Sequence->new( rules => [ Path::Dispatcher::Rule::Regex->new( regex => qr/^(update|edit|show|display|delete|del|rm|history)$/, ), Prophet::CLI::Dispatcher::Rule::RecordId->new, ], block => sub { my $match = shift; my $self = shift; $self->context->set_id_from_primary_commands; run($match->pos(1), $self, @_); }, ) ); on [ [ 'update', 'edit' ] ] => run_command("Update"); on [ [ 'show', 'display' ] ] => run_command("Show"); on [ [ 'delete', 'del', 'rm' ] ] => run_command("Delete"); on history => run_command("History"); on [ ['create', 'new'] ] => run_command("Create"); on [ ['search', 'list', 'ls' ] ] => run_command("Search"); on [ ['aliases', 'alias'] ] => run_command('Aliases'); on version => run_command("Version"); on init => run_command("Init"); on clone => run_command("Clone"); on merge => run_command("Merge"); on mirror => run_command('Mirror'); on pull => run_command("Pull"); on publish => run_command("Publish"); on server => run_command("Server"); on config => run_command("Config"); on settings => run_command("Settings"); on log => run_command("Log"); on shell => run_command("Shell"); on export => run_command('Export'); on info => run_command('Info'); on push => run_command('Push'); on qr/^(alias(?:es)?|config)?\s+(.*)/ => sub { my ( $self ) = @_; my $cmd = $1; my $arg = $2; my $class = $cmd =~ /^alias/ ? 'Aliases' : 'Config'; # Load command class so we can run # its arg-parsing sub (the syntax is complex) my @classes = $self->class_names($class); for my $class (@classes) { Prophet::App->try_to_require($class) or next; my $cmd_obj = $class->new( context => $self->context, cli => $self->cli, ); $cmd_obj->parse_cli_arg($cmd, $arg); return run( $cmd, $self, @_ ); } # Something is wrong with the app layout... 
die "Could not find '$class' command class"; }; on qr/^_gencomp\s*(.*)/ => sub { my $self = shift; my $path = $1; $path = "" if !defined($path); print "$_\n" for $self->dispatcher->complete($path); }; sub run_command { my $name = shift; return sub { my $self = shift; my %constructor_args = ( cli => $self->cli, context => $self->context, commands => $self->context->primary_commands, type => $self->context->type, uuid => $self->context->uuid, ); # undef causes type constraint violations for my $key (keys %constructor_args) { delete $constructor_args{$key} if !defined($constructor_args{$key}); } my @classes = $self->class_names($name); for my $class (@classes) { Prophet::App->try_to_require($class) or next; $class->new(%constructor_args)->run; return; } die "Invalid command command class suffix '$name'"; }; } sub class_names { my $self = shift; my $command = shift; return map { $_."::".$command } @PREFIXES; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/Dispatcher/000077500000000000000000000000001160607302300200705ustar00rootroot00000000000000prophet-0.750/lib/Prophet/CLI/Dispatcher/Rule.pm000066400000000000000000000001741160607302300213370ustar00rootroot00000000000000package Prophet::CLI::Dispatcher::Rule; use Any::Moose 'Role'; with 'Prophet::CLI::Parameters'; no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/Dispatcher/Rule/000077500000000000000000000000001160607302300207775ustar00rootroot00000000000000prophet-0.750/lib/Prophet/CLI/Dispatcher/Rule/RecordId.pm000066400000000000000000000015551160607302300230360ustar00rootroot00000000000000package Prophet::CLI::Dispatcher::Rule::RecordId; use Any::Moose; extends 'Path::Dispatcher::Rule::Regex'; with 'Prophet::CLI::Dispatcher::Rule'; use Prophet::CLIContext; has '+regex' => ( default => sub { qr/^$Prophet::CLIContext::ID_REGEX$/i }, ); has type => ( is => 'ro', isa => 'Str', ); sub complete { my $self = shift; my $path = shift->path; my $handle = $self->cli->app_handle->handle; my @types = $self->type || @{ $handle->list_types }; my @ids; for my $type (@types) { push @ids, grep { substr($_, 0, length($path)) eq $path } map { ($_->uuid, $_->luid) } @{ $handle->list_records( type => $type, record_class => $self->cli->record_class, ) }; } return @ids; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/CLI/MirrorCommand.pm000066400000000000000000000027721160607302300211210ustar00rootroot00000000000000package Prophet::CLI::MirrorCommand; use Any::Moose 'Role'; with 'Prophet::CLI::ProgressBar'; use Params::Validate ':all'; sub get_cache_for_source { my $self = shift; my ($source) = validate_pos(@_,{isa => 'Prophet::Replica'}); my $target = Prophet::Replica->get_handle( url => 'prophet_cache:' . $source->uuid , app_handle => $self->app_handle ); if ( !$target->replica_exists && !$target->can_initialize ) { die "The target replica path you specified can't be created.\n"; } $target->initialize_from_source($source); return $target; } sub sync_cache_from_source { my $self = shift; my %args = validate(@_, { target => { isa => 'Prophet::Replica::prophet_cache'}, source => { isa => 'Prophet::Replica'}}); if ($args{target}->latest_sequence_no == $args{source}->latest_sequence_no) { print "Mirror of ".$args{source}->url. " is already up to date\n"; return } print "Mirroring resolutions from " . $args{source}->url . 
"\n"; $args{target}->resolution_db_handle->mirror_from( source => $args{source}->resolution_db_handle, reporting_callback => $self->progress_bar( max => ($args{source}->resolution_db_handle->latest_sequence_no ||0) ) ); print "\nMirroring changesets from " . $args{source}->url . "\n"; $args{target}->mirror_from( source => $args{source}, reporting_callback => $self->progress_bar( max => ($args{source}->latest_sequence_no ||0) ) ); } no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/Parameters.pm000066400000000000000000000003411160607302300204410ustar00rootroot00000000000000#!/usr/bin/env perl package Prophet::CLI::Parameters; use Any::Moose 'Role'; sub cli { return $Prophet::CLI::Dispatcher::cli; } sub context { my $self = shift; $self->cli->context; } no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/ProgressBar.pm000066400000000000000000000010721160607302300205710ustar00rootroot00000000000000package Prophet::CLI::ProgressBar; use Any::Moose 'Role'; use Time::Progress; use Params::Validate ':all'; sub progress_bar { my $self = shift; my %args = validate(@_, {max => 1, format => { optional =>1, default => "%30b %p %L (%E remaining)\r" }}); my $bar = Time::Progress->new(); $bar->attr(max => $args{max}); my $bar_count = 0; my $format = $args{format}; return sub { # disable autoflush to make \r work properly local $| = 1; print $bar->report( $format, ++$bar_count ); } } no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/PublishCommand.pm000066400000000000000000000055611160607302300212540ustar00rootroot00000000000000package Prophet::CLI::PublishCommand; use Any::Moose 'Role'; use File::Temp (); sub tempdir { my $dir = File::Temp::tempdir(CLEANUP => ! $ENV{PROPHET_DEBUG} ); return $dir; } sub publish_dir { my $self = shift; my %args = @_; $args{from} .= '/'; my $rsync = $ENV{RSYNC} || "rsync"; my @args; # chmod feature requires rsync >= 2.6.7 my ($rsync_version) = ( (qx{$rsync --version})[0] =~ /version ([\d.]+) / ); $rsync_version =~ s/[.]//g if $rsync_version; # kill dot separators in vnum if ( $rsync_version && $rsync_version < 267 ) { warn <<'END_WARNING'; W: rsync >= 2.6.7 is required in order to ensure the published replica has W: the default permissions of the destination if they are more permissive W: than the source replica's permissions. You may wish to upgrade your W: rsync if possible. (I'll still publish, but your published replica W: will have the same permissions as the source replica, which is probably W: not what you want.) END_WARNING } # Set directories to be globally +rx, files to be globally +r # note - this frobs the permissions on the *sending* side; the # receiving side's umask is still applied -- this option just # allows you to publish a replica stored in a private directory # and have it have the receiving end's default permissions, even # if those are more permissive than the original location push @args, '--chmod=Da+rx,a+r'; push @args, '--verbose' if $self->context->has_arg('verbose'); push @args, '--progress' if $self->context->has_arg('progress'); # avoid edge cases when exporting replicas! still update files even # if they have the same size and time. 
# (latest-sequence-no is a file that can fall into this trap, since it's # ~easy for it to have the same size as it was previously and in test # cases we sometimes export to the same directory in quick succession) push @args, '--ignore-times'; if ( $^O =~ /MSWin/ ) { require Win32; for (qw/from to/) { # convert old 8.3 name $args{$_} = Win32::GetLongPathName($args{$_}); # cwrsync uses cygwin $args{$_} =~ s!^([A-Z]):!'/cygdrive/' . lc $1!eg; $args{$_} =~ s!\\!/!g; $args{$_} = q{"} . $args{$_} . q{"}; } } push @args, '-e', $args{shell} if defined $args{shell}; push @args, '--recursive', '--' , $args{from}, $args{to}; my $ret = system($rsync, @args); if ($ret == -1) { die <<'END_DIE_MSG'; You must have 'rsync' installed to use this command. If you have rsync but it's not in your path, set the environment variable $RSYNC to the absolute path of your rsync executable. END_DIE_MSG } elsif ($ret != 0) { die "Publish NOT completed! (rsync failed with return value $ret)\n"; } else { return $ret; } } no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/RecordCommand.pm000066400000000000000000000046631160607302300210660ustar00rootroot00000000000000package Prophet::CLI::RecordCommand; use Any::Moose 'Role'; use Params::Validate; use Prophet::Record; has type => ( is => 'rw', isa => 'Str', required => 0, predicate => 'has_type', ); has uuid => ( is => 'rw', isa => 'Str', required => 0, predicate => 'has_uuid', ); has record_class => ( is => 'rw', isa => 'Prophet::Record', ); =head2 _get_record_object [{ type => 'type' }] Tries to determine a record class from either the given type argument or the current object's C<$type> attribute. Returns a new instance of the record class on success, or throws a fatal error with a stack trace on failure. =cut sub _get_record_object { my $self = shift; my %args = validate(@_, { type => { default => $self->type }, }); my $constructor_args = { app_handle => $self->cli->app_handle, handle => $self->cli->handle, type => $args{type}, }; if ($args{type}) { my $class = $self->_type_to_record_class($args{type}); return $class->new($constructor_args); } elsif (my $class = $self->record_class) { Prophet::App->require($class); return $class->new($constructor_args); } else { $self->fatal_error("I couldn't find that record. (You didn't specify a record type.)"); } } =head2 _load_record Attempts to load the record specified by the C attribute. Returns the loaded record on success, or throws a fatal error if no record can be found. =cut sub _load_record { my $self = shift; my $record = $self->_get_record_object; $record->load( uuid => $self->uuid ); if (! $record->exists) { $self->fatal_error("I couldn't find a " . $self->type . ' with that id.'); } return $record; } =head2 _type_to_record_class $type Takes a type and tries to figure out a record class name from it. Returns C<'Prophet::Record'> if no better class name is found. =cut sub _type_to_record_class { my $self = shift; my $type = shift; my $try = $self->cli->app_class . "::Model::" . ucfirst( lc($type) ); Prophet::App->try_to_require($try); # don't care about fails return $try if ( $try->isa('Prophet::Record') ); $try = $self->cli->app_class . 
"::Record"; Prophet::App->try_to_require($try); # don't care about fails return $try if ( $try->isa('Prophet::Record') ); return 'Prophet::Record'; } no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLI/TextEditorCommand.pm000066400000000000000000000077211160607302300217410ustar00rootroot00000000000000package Prophet::CLI::TextEditorCommand; use Any::Moose 'Role'; use Params::Validate qw/validate/; requires 'process_template'; =head2 separator_pattern A pattern that will match on lines that count as section separators in record templates. Separator string text is remembered as C<$1>. =cut use constant separator_pattern => qr/^=== (.*) ===$/; =head2 comment_pattern A pattern that will match on lines that count as comments in record templates. =cut use constant comment_pattern => qr/^\s*#/; =head2 build_separator $text Takes a string and returns it in separator form. A separator is a line of text that denotes a section in a template. =cut sub build_separator { my $self = shift; my $text = shift; return "=== $text ==="; } =head2 build_template_section header => '=== foo ===' [, data => 'bar'] Takes a header text string and (optionally) a data string and formats them into a template section. =cut sub build_template_section { my $self = shift; my %args = validate (@_, { header => 1, data => 0 }); return $self->build_separator($args{'header'}) ."\n\n". ( $args{data} || ''); } =head2 try_to_edit template => \$tmpl [, record => $record ] Edits the given template if possible. Passes the updated template in to process_template (errors in the updated template must be handled there, not here). =cut sub try_to_edit { my $self = shift; my %args = validate( @_, { template => 1, record => 0, } ); my $template = ${ $args{template} }; # do the edit my $updated = $self->edit_text($template); die "Aborted.\n" if $updated eq $template; # user didn't change anything $self->process_template( template => $args{template}, edited => $updated, record => $args{record} ); } =head2 handle_template_errors error => 'foo', template_ref => \$tmpl_str, bad_template => 'bar', rtype => 'ticket' Should be called in C if errors (usually validation ones) occur while processing a record template. This method prompts the user to re-edit and updates the template given by C to contain the bad template (given by the arg C prefixed with the error messages given in the C arg. If an errors section already exists in the template, it is replaced with an errors section containing the new errors. If the template you are editing is not section-based, you can override what will be prepended to the template by passing in the C argument, and passing in C if a template errors out repeatedly and there are old errors in the template that need to be replaced. Other arguments are: C: the type of the record being edited. All arguments except overrides (C and C are required. =cut sub handle_template_errors { my $self = shift; my %args = validate( @_, { error => 1, template_ref => 1, bad_template => 1, rtype => 1, errors_pattern => 0, old_errors => 0 } ); my $errors_pattern = defined $args{errors_pattern} ? $args{errors_pattern} : "=== errors in this $args{rtype} ==="; $self->prompt_Yn("Whoops, an error occurred processing your $args{rtype}.\nTry editing again? (Errors will be shown.)") || die "Aborted.\n"; # template is section-based if ( !defined $args{old_errors} ) { # if the bad template already has an errors section in it, remove it $args{bad_template} =~ s/$errors_pattern.*?\n(?==== .*? 
===\n)//s; } # template is not section-based: we allow passing in the old error to kill else { $args{bad_template} =~ s/\Q$args{old_errors}\E\n\n\n//; } ${ $args{'template_ref'} } = ($errors_pattern ? "$errors_pattern\n\n" : '') . $args{error} . "\n\n\n" . $args{bad_template}; return 0; } =head1 calling code must implement run process_template =cut no Any::Moose 'Role'; 1; prophet-0.750/lib/Prophet/CLIContext.pm000066400000000000000000000173431160607302300177150ustar00rootroot00000000000000package Prophet::CLIContext; use Any::Moose; has app_handle => ( is => 'rw', isa => 'Prophet::App', lazy => 1, handles => [qw/handle resdb_handle config/], weak_ref => 1, default => sub { return $_[0]->app_class->new; }, ); has uuid => ( is => 'rw', isa => 'Str', predicate => 'has_uuid', documentation => "This is the uuid set by the user from the commandline", ); has type => ( is => 'rw', isa => 'Str', documentation => "This is the type set by the user from the commandline", ); has args => ( is => 'rw', isa => 'HashRef', default => sub { {} }, documentation => "This is a reference to the key-value pairs passed in on the commandline", ); has raw_args => ( is => 'rw', isa => 'ArrayRef', default => sub {[]}, ); sub set_arg { $_[0]->args->{$_[1]} = $_[2] } sub arg { $_[0]->args->{$_[1]} } sub has_arg { exists $_[0]->args->{$_[1]} } sub delete_arg { delete $_[0]->args->{$_[1]} } sub arg_names { keys %{ $_[0]->args } } sub clear_args { %{ $_[0]->args } = () } has props => ( is => 'rw', isa => 'HashRef', default => sub { {} }, ); sub set_prop { $_[0]->props->{$_[1]} = $_[2] } sub prop { $_[0]->props->{$_[1]} } sub has_prop { exists $_[0]->props->{$_[1]} } sub delete_prop { delete $_[0]->props->{$_[1]} } sub prop_names { keys %{ $_[0]->props } } sub clear_props { %{ $_[0]->props } = (); # clear the prop_set too! $_[0]->prop_set( () ); } has prop_set => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, auto_deref => 1, ); sub add_to_prop_set { my $self = shift; my $args = shift; push @{ $self->prop_set }, $args; $self->set_prop( $args->{prop} => $args->{value} ); } has primary_commands => ( is => 'rw', isa => 'ArrayRef', documentation => "The commands the user executes from the commandline", ); =head2 mutate_attributes ( args => $hashref, props => $hashref, type => 'str' ) A hook for running a second command from within a command without having to use the commandline argument parsing. If C, C, or C are not passed in, the values from the previous command run are used. =cut sub mutate_attributes { my $self = shift; my %args = @_; $self->clear_args(); $self->clear_props(); if ( my $cmd_args = $args{args} ) { for my $arg ( keys %$cmd_args ) { if ( $arg eq 'uuid' ) { $self->uuid( $cmd_args->{$arg} ); } $self->set_arg( $arg => $cmd_args->{$arg} ); } } if ( my $props = $args{props} ) { for my $prop (@$props) { my $key = $prop->{prop}; my $value = $prop->{value}; $self->set_prop( $key => $value ); } } if ( my $type = $args{type} ) { $self->type($type); } if ( my $primary_commands = $args{ $self->primary_commands } ) { $self->primary_commands( $primary_commands ); } } =head2 cmp_regex The regex to use for matching property key/value separators. =cut use constant cmp_regex => '!=|<>|=~|!~|=|\bne\b'; =head2 $ID_REGEX The regex to use for matching the id argument (luid / uuid). =cut our $ID_REGEX = qr'(?:\d+|[0-9a-fA-F\-]{32,36}|[A-Za-z0-9\-\_]{22})'; =head2 setup_from_args Sets up this context object's arguments and key/value pairs from an array that looks like an @ARGV. 
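A minimal usage sketch (illustrative only; C<$app> is assumed to be an already-constructed application handle and C<$uuid> a placeholder record uuid):

    my $context = Prophet::CLIContext->new( app_handle => $app );
    # primary commands first, then --key value arguments, then record props
    $context->setup_from_args(
        'ticket', 'update', '--uuid', $uuid, '--', 'status=closed',
    );
    # $context->uuid is now $uuid, and the prop set contains
    # { prop => 'status', cmp => '=', value => 'closed' }
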
=cut sub setup_from_args { my $self = shift; $self->raw_args([@_]); $self->parse_args(@_); $self->set_type_and_uuid(); } =head2 parse_args @args This routine pulls arguments (specified by --key=value or --key value or -k value) and properties (specified by --props key=value or -- key=value) as passed on the command line out of ARGV (or something else emulating ARGV) and sticks them in L or L and L as necessary. Argument keys have leading "--" or "-" stripped. If a key is not given a value on the command line, its value is set to undef. More complicated separators such as =~ (for regexes) are also handled (see L for details). =cut sub parse_args { my $self = shift; my @args = (@_); my @primary; push @primary, shift @args while ( $args[0] && $args[0] !~ /^-/ ); my $collecting_props = 0; $self->primary_commands( \@primary ); my $cmp_re = $self->cmp_regex; while ( my $name = shift @args ) { die "$name doesn't look like --argument\n" if !$collecting_props && $name !~ /^-/; if ( $name eq '--' || $name eq '--props' ) { $collecting_props = 1; next; } my $cmp = '='; my $val; ( $name, $cmp, $val ) = ( $1, $2, $3 ) if $name =~ /^(.*?)($cmp_re)(.*)$/; $name =~ s/^(?:--|-)//; # no value specified, pull it from the next argument, unless the next # argument is another option if ( !defined($val) ) { $val = shift @args if @args && $args[0] !~ /^-/; no warnings 'uninitialized'; # but wait! does the value look enough like a comparator? if so, # shift off another one (if we can) if ($collecting_props) { if ( $val =~ /^(?:$cmp_re)$/ && @args && $args[0] !~ /^--/ ) { $cmp = $val; $val = shift @args; } else { # perhaps they said "foo =~bar".. $cmp = $1 if $val =~ s/^($cmp_re)//; } } } if ($collecting_props) { $self->add_to_prop_set( { prop => $name, cmp => $cmp, value => $val, } ); } else { $self->set_arg( $name => $val ); } } } =head2 set_type_and_uuid When working with individual records, it is often the case that we'll be expecting a --type argument and then a mess of other key-value pairs. This routine figures out and sets C and C from the arguments given on the command-line, if possible. Being unable to figure out a uuid is fatal. 
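For instance, with primary commands like C<ticket show 42> and no explicit C<--type> argument, the type is inferred from the word two positions before the id (an illustrative sketch; C<$app> as above):

    my $ctx = Prophet::CLIContext->new( app_handle => $app );
    $ctx->setup_from_args(qw(ticket show 42));
    # $ctx->type is now 'ticket'; the trailing id itself is popped off the
    # primary commands later, by set_id_from_primary_commands in the
    # dispatcher rules
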
=cut sub set_type_and_uuid { my $self = shift; $self->set_uuid; $self->set_type; } sub set_uuid { my $self = shift; if ( my $id = $self->delete_arg('id') ) { if ( $id =~ /^(\d+)$/ ) { $self->set_arg( luid => $id ); } else { $self->set_arg( uuid => $id ); } } if ( my $uuid = $self->delete_arg('uuid') ) { $self->uuid($uuid); } elsif ( my $luid = $self->delete_arg('luid') ) { my $uuid = $self->handle->find_uuid_by_luid( luid => $luid ); die "I have no UUID mapped to the local id '$luid'\n" if !defined($uuid); $self->uuid($uuid); } } sub set_type { my $self = shift; if ( my $type = $self->delete_arg('type') ) { $self->type($type); } # allowance for things like ticket show 77, where 'ticket' is the type elsif ( $self->primary_commands->[-1] && $self->primary_commands->[-1] =~ qr/^$Prophet::CLIContext::ID_REGEX$/i && $self->primary_commands->[-3] ) { $self->type( $self->primary_commands->[-3] ); } elsif ( $self->primary_commands->[-2] ) { $self->type( $self->primary_commands->[-2] ); } } sub set_id_from_primary_commands { my $self = shift; if ( (my $id = pop @{$self->primary_commands}) =~ $ID_REGEX ) { $self->set_id($id); } } sub set_id { my $self = shift; my $id = shift; $self->set_arg( id => $id ); $self->set_uuid; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Change.pm000066400000000000000000000110361160607302300171170ustar00rootroot00000000000000package Prophet::Change; use Any::Moose; use Prophet::Meta::Types; use Prophet::PropChange; use Params::Validate; has record_type => ( is => 'rw', isa => 'Str', ); has record_uuid => ( is => 'rw', isa => 'Str', ); has change_type => ( is => 'rw', isa => 'Prophet::Type::ChangeType', ); has resolution_cas => ( is => 'rw', isa => 'Str', ); has is_resolution => ( is => 'rw', isa => 'Bool', ); has prop_changes => ( is => 'rw', isa => 'ArrayRef', auto_deref => 1, default => sub { [] }, ); sub has_prop_changes { scalar @{ $_[0]->prop_changes } } sub _add_prop_change { my $self = shift; push @{ $self->prop_changes }, @_; } =head1 NAME Prophet::Change =head1 DESCRIPTION This class encapsulates a change to a single record in a Prophet replica. =head1 METHODS =head2 record_type The record type for the record. =head2 record_uuid The UUID of the record being changed. =head2 change_type One of C, C, C, C. =head2 is_resolution A boolean value specifying whether this change represents a conflict resolution or not. =head2 prop_changes [\@PROPCHANGES] Returns a list of Ls associated with this Change. Takes an optional arrayref to fully replace the set of propchanges. =head2 has_prop_changes Returns true if this change contains any Ls and false if it doesn't. =head2 new_from_conflict $conflict Takes a L object and creates a Prophet::Change object representing the conflict resolution. =cut sub new_from_conflict { my ( $class, $conflict ) = @_; my $self = $class->new( { is_resolution => 1, resolution_cas => $conflict->fingerprint, change_type => $conflict->change_type, record_type => $conflict->record_type, record_uuid => $conflict->record_uuid } ); return $self; } =head2 add_prop_change { new => __, old => ___, name => ___ } Adds a new L to this L. Takes a C, and the C and C values. 
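For example (an illustrative sketch; C<$change> is assumed to be an existing L<Prophet::Change> object):

    $change->add_prop_change(
        name => 'status',
        old  => 'open',
        new  => 'closed',
    );
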
=cut sub add_prop_change { my $self = shift; my %args = (@_); # validate is far too heavy to be called here # my %args = validate( @_, { name => 1, old => 0, new => 0 } ); my $change = Prophet::PropChange->new( name => $args{'name'}, old_value => $args{'old'}, new_value => $args{'new'}, ); $self->_add_prop_change($change); } =head2 as_hash Returns a reference to a representation of this change as a hash. =cut sub as_hash { my $self = shift; my $props = {}; for my $pc ( $self->prop_changes ) { $props->{ $pc->name } = { old_value => $pc->old_value, new_value => $pc->new_value }; } return { record_type => $self->record_type, change_type => $self->change_type, prop_changes => $props, }; } =head2 as_string ARGS Returns a string representing this change. If C<$args{header_callback}> is specified, the string returned from passing C<$self> to the callback is prepended to the change string before it is returned. =cut sub as_string { my $self = shift; my %args = validate( @_, { header_callback => 0, } ); my $out = ''; my @prop_changes = $self->prop_changes; return '' if @prop_changes == 0; $out .= $args{header_callback}->($self) if ( $args{header_callback} ); for my $summary ( sort grep {defined }(map { $_->summary} @prop_changes)) { $out .= " " . $summary . "\n"; } return $out; } =head2 new_from_hashref HASHREF Takes a reference to a hash representation of a change (such as is returned by L or serialized json) and returns a new Prophet::Change representation of it. This method should be invoked as a class method, not an object method. For example: Cnew_from_hashref($ref_to_change_hash)> =cut sub new_from_hashref { my $class = shift; my $uuid = shift; my $hashref = shift; my $self = $class->new( { record_type => $hashref->{'record_type'}, record_uuid => $uuid, change_type => $hashref->{'change_type'}, } ); for my $prop ( keys %{ $hashref->{'prop_changes'} } ) { $self->add_prop_change( name => $prop, old => $hashref->{'prop_changes'}->{$prop}->{'old_value'}, new => $hashref->{'prop_changes'}->{$prop}->{'new_value'} ); } return $self; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/ChangeSet.pm000066400000000000000000000152011160607302300175710ustar00rootroot00000000000000package Prophet::ChangeSet; use Any::Moose; use Prophet::Change; use Params::Validate; use Digest::SHA qw/sha1_hex/; use JSON; has creator => ( is => 'rw', isa => 'Str|Undef', ); has created => ( is => 'rw', isa => 'Str|Undef', default => sub { my ($sec, $min, $hour, $day, $month, $year) = gmtime; $year += 1900; $month++; return sprintf '%04d-%02d-%02d %02d:%02d:%02d', $year, $month, $day, $hour, $min, $sec; }, ); has source_uuid => ( is => 'rw', isa => 'Str|Undef', ); has sequence_no => ( is => 'rw', isa => 'Int|Undef', ); has original_source_uuid => ( is => 'rw', isa => 'Str', ); has original_sequence_no => ( is => 'rw', isa => 'Int|Undef', ); has is_nullification => ( is => 'rw', isa => 'Bool', ); has is_resolution => ( is => 'rw', isa => 'Bool', ); has changes => ( is => 'rw', isa => 'ArrayRef', auto_deref => 1, default => sub { [] }, ); has sha1 => ( is => 'rw', isa => 'Maybe[Str]' ); sub has_changes { scalar @{ $_[0]->changes } } sub _add_change { my $self = shift; push @{ $self->changes }, @_; } =head1 NAME Prophet::ChangeSet =head1 DESCRIPTION This class represents a single, atomic Prophet database update. It tracks some metadata about the changeset itself and contains a list of L entries which describe the actual records created, updated and deleted. 
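A rough sketch of assembling a changeset by hand (illustrative only; C<$uuid> is a placeholder, and C<update_file> is assumed to be one of the change types enumerated in L<Prophet::Change>):

    my $change = Prophet::Change->new(
        record_type => 'ticket',
        record_uuid => $uuid,            # uuid of the record being touched
        change_type => 'update_file',
    );
    $change->add_prop_change( name => 'status', old => 'open', new => 'closed' );

    my $changeset = Prophet::ChangeSet->new( creator => 'alice@example.com' );
    $changeset->add_change( change => $change );
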
=head1 METHODS =head2 new Instantiate a new, empty L object. =head2 creator A string representing who created this changeset. =head2 created A string representing the ISO 8601 date and time when this changeset was created (UTC). =head2 sequence_no The changeset's sequence number (in subversion terms, revision #) on the replica sending us the changeset. =head2 source_uuid The uuid of the replica sending us the change. =head2 original_source_uuid The uuid of the replica where the change was authored. =head2 original_sequence_no The changeset's sequence number (in subversion terms, revision #) on the replica where the change was originally created. =head2 is_nullification A boolean value specifying whether this is a nullification changeset or not. =head2 is_resolution A boolean value specifying whether this is a conflict resolution changeset or not. =head2 changes Returns an array of all the changes in the current changeset. =head2 has_changes Returns true if this changeset has any changes. =head2 add_change { change => L } Adds a new change to this changeset. =cut sub add_change { my $self = shift; my %args = validate( @_, { change => { isa => 'Prophet::Change' } } ); $self->_add_change($args{change}); } our @SERIALIZE_PROPS = (qw(creator created sequence_no source_uuid original_source_uuid original_sequence_no is_nullification is_resolution)); =head2 as_hash Returns a reference to a representation of this changeset as a hash, containing all the properties in the package variable C<@SERIALIZE_PROPS>, as well as a C key containing hash representations of each change in the changeset, keyed on UUID. =cut sub as_hash { my $self = shift; my $as_hash = { map { $_ => $self->$_() } @SERIALIZE_PROPS }; for my $change ( $self->changes ) { $as_hash->{changes}->{ $change->record_uuid } = $change->as_hash; } return $as_hash; } =head2 new_from_hashref HASHREF Takes a reference to a hash representation of a changeset (such as is returned by L or serialized json) and returns a new Prophet::ChangeSet representation of it. Should be invoked as a class method, not an object method. For example: Cnew_from_hashref($ref_to_changeset_hash)> =cut sub new_from_hashref { my $class = shift; my $hashref = shift; my $self = $class->new( { map { $_ => $hashref->{$_} } @SERIALIZE_PROPS } ); for my $change ( keys %{ $hashref->{changes} } ) { $self->add_change( change => Prophet::Change->new_from_hashref( $change => $hashref->{changes}->{$change} ) ); } return $self; } =head2 as_string ARGS Returns a single string representing the changes in this changeset. If C<$args{header_callback}> is defined, the string returned from passing C<$self> to the callback is prepended to the changeset string before it is returned (instead of L). If C<$args{skip_empty}> is defined, an empty string is returned if the changeset contains no changes. The argument C can be used to filter certain changes from the string representation; the function is passed a change and should return false if that change should be skipped. The C argument, if present, is passed to C<$change-Eto_string> when individual changes are converted to strings. 
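A hedged usage sketch (with C<$changeset> as constructed in the example under DESCRIPTION above):

    print $changeset->as_string(
        skip_empty      => 1,
        header_callback => sub {
            my $cs = shift;
            return "Changes by " . ( $cs->creator || '(unknown)' ) . ":\n";
        },
    );
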
=cut sub as_string { my $self = shift; my %args = validate( @_, { change_filter => 0, change_header => 0, change_formatter => undef, header_callback => 0, skip_empty => 0 } ); my $body = ''; for my $change ( $self->changes ) { next if $args{change_filter} && !$args{change_filter}->($change); if ($args{change_formatter} ) { $body .= $args{change_formatter}->(change => $change, header_callback => $args{change_header}); } else { $body .= $change->as_string( header_callback => $args{change_header} ) || next; $body .= "\n"; } } return '' if !$body && $args{'skip_empty'}; my $header = $args{header_callback} ? $args{header_callback}->($self) : $self->description_as_string; my $out = $header .$body; return $out; } =head2 description_as_string Returns a string representing a description of this changeset. =cut sub description_as_string { my $self = shift; sprintf " %s at %s\t\(%d@%s)\n", ( $self->creator || '(unknown)' ), $self->created, $self->original_sequence_no, $self->original_source_uuid; } sub created_as_rfc3339 { my $self = shift; my $c = $self->created; $c =~ s/ /T/; return $c."Z"; } sub calculate_sha1 { my $self = shift; return sha1_hex($self->canonical_json_representation); } sub canonical_json_representation { my $self = shift; my $hash_changeset = $self->as_hash; # These two things should never actually get stored delete $hash_changeset->{'sequence_no'}; delete $hash_changeset->{'source_uuid'}; return to_json( $hash_changeset, { canonical => 1, pretty => 0, utf8 => 1 } ); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Collection.pm000066400000000000000000000040641160607302300200300ustar00rootroot00000000000000package Prophet::Collection; use Any::Moose; use Params::Validate; use Prophet::Record; use overload '@{}' => sub { shift->items }, fallback => 1; use constant record_class => 'Prophet::Record'; has app_handle => ( is => 'rw', isa => 'Prophet::App|Undef', required => 0, trigger => sub { my ($self, $app) = @_; $self->handle($app->handle); }, ); has handle => ( is => 'rw', isa => 'Prophet::Replica', ); has type => ( is => 'rw', isa => 'Str', lazy => 1, default => sub { my $self = shift; $self->record_class->new(app_handle => $self->app_handle)->record_type; }, ); has items => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, auto_deref => 1, ); sub count { scalar @{ $_[0]->items } } sub add_item { my $self = shift; push @{ $self->items }, @_; } =head1 NAME Prophet::Collection =head1 DESCRIPTION This class allows the programmer to search for L objects matching certain criteria and to operate on those records as a collection. =head1 METHODS =head2 new { handle => L, type => $TYPE } Instantiate a new, empty L object to find items of type C<$TYPE>. =head2 matching $CODEREF Find all Ls of this collection's C where $CODEREF returns true. 
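For example (an illustrative sketch; C<$handle> is assumed to be an open L<Prophet::Replica>):

    my $records = Prophet::Collection->new( handle => $handle, type => 'ticket' );
    $records->matching( sub { ( $_[0]->prop('status') || '' ) eq 'open' } );
    print $_->uuid, "\n" for @$records;    # @{} is overloaded to return items
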
=cut sub matching { my $self = shift; my $coderef = shift; # return undef unless $self->handle->type_exists( type => $self->type ); # find all items, Carp::cluck unless defined $self->type; my $records = $self->handle->list_records( record_class => $self->record_class, type => $self->type ); # run coderef against each item; # if it matches, add it to items for my $record (@$records) { $self->add_item($record) if ( $coderef->($record) ); } # XXX TODO return a count of items found } =head2 items Returns a reference to an array of all the items found =head2 add_item =head2 count =cut __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Config.pm000066400000000000000000000143651160607302300171470ustar00rootroot00000000000000package Prophet::Config; use Any::Moose; use File::Spec; use Prophet::Util; extends 'Config::GitLike'; has app_handle => ( is => 'ro', weak_ref => 1, isa => 'Prophet::App', required => 1 ); use constant FORMAT_VERSION => 0; # reload config after setting values override group_set => sub { my $self = shift; my ($filename, $args_ref, $override) = @_; # Set a config format version on this config file if # it doesn't have one already. unshift @$args_ref, { key => 'core.config-format-version', value => $self->FORMAT_VERSION, } unless _file_has_config_format_version( $filename ); $self->SUPER::group_set($filename, $args_ref); $self->load unless $override; }; sub _file_has_config_format_version { my $filename = shift; my $content = -f $filename ? Prophet::Util->slurp($filename) : ''; return $content =~ 'core.config-format-version'; } # per-replica config filename override dir_file => sub { 'config' }; # Override the replica config file with the PROPHET_APP_CONFIG # env var if it's set. Also, don't walk up the given path if no replica # config is found. override load_dirs => sub { my $self = shift; $self->load_file( $self->replica_config_file ) if -f $self->replica_config_file; }; # If PROPHET_APP_CONFIG is set, don't load anything else override user_file => sub { my $self = shift; return exists $ENV{PROPHET_APP_CONFIG} ? '' : $self->SUPER::user_file(@_); }; override global_file => sub { my $self = shift; return exists $ENV{PROPHET_APP_CONFIG} ? '' : $self->SUPER::global_file(@_); }; # grab all values in the 'alias' section (of the file, if given) and strip # away the section name sub aliases { my $self = shift; my $file = shift; my %new_aliases; if ( $file ) { # parse the given config file with parse_content and use the # callbacks to add to an array my $content = -f $file ? Prophet::Util->slurp( $file ) : ''; $self->parse_content( content => $content, callback => sub { my %args = @_; return unless defined $args{name}; if ( $args{section} eq 'alias' ) { $new_aliases{$args{name}} = $args{value}; } }, # Most of the time this error sub won't get triggered since # Prophet loads the config file whenever it first tries to use # a value from the config file, and errors are detected at that # point. This always happens before this since every command # triggers alias processing. So this should really only explode # if we're running a shell and the config file has changed # in a bad way since we started up. error => sub { Config::GitLike::error_callback( @_, filename => $file ); }, ); } else { my %aliases = $self->get_regexp( key => '^alias\.' ); %new_aliases = map { my $alias = $_; $alias =~ s/^alias\.//; ( $alias => $aliases{$_} ); } keys %aliases; } return wantarray ? 
%new_aliases : \%new_aliases; } # grab all the replicas we know of and return a hash of # name => variable, or variable => name if $args{by_variable} is true sub sources { my $self = shift; my %args = ( by_url => undef, variable => 'url', @_, ); my %sources = $self->get_regexp( key => "^replica[.].*[.]$args{variable}\$" ); my %new_sources = map { $_ =~ /^replica\.(.*)\.$args{variable}$/; $args{by_variable} ? ( $sources{$_} => $1 ) : ( $1 => $sources{$_} ); } keys %sources; return wantarray ? %new_sources : \%new_sources; } sub replica_config_file { my $self = shift; return exists $ENV{PROPHET_APP_CONFIG} ? $ENV{PROPHET_APP_CONFIG} : Prophet::Util->catfile( $self->app_handle->handle->fs_root, $self->dir_file ); } sub _file_if_exists { my $self = shift; my $file = shift || ''; # quiet warnings return (-e $file) ? $file : ''; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; __END__ =head1 NAME Prophet::Config =head1 SYNOPSIS From, for example, a class that inherits from Prophet::App: has config => ( is => 'rw', isa => 'Prophet::Config', default => sub { my $self = shift; return Prophet::Config->new( app_handle => $self, confname => 'prophetrc', ); }, ); =head1 DESCRIPTION This class represents the configuration of Prophet and the application built on top of it. It's just an instance of L with a few small customizations and additions. =head1 METHODS =head2 new( confname => 'prophetrc', app_handle => $instance_of_prophet_app ) Initialize the configuration. Does NOT load the config for you! You need to call L for that. The configuration will also load automatically the first time your prophet application tries to L a config variable. Both constructor arguments are required. =head2 replica_config_file The replica-specific configuration file, or the configuration file given by C if that environmental variable is set. =head2 aliases( $config_filename ) A convenience method that gets you a hash (or a hashref, depending on context) of all currently defined aliases. (Basically, every entry in the 'alias' section of the config file.) If a filename is passed in, this method will only return the aliases that are defined in that particular config file. =head2 sources A convenience method that gets you a hash (or a hashref, depending on context) of all currently defined source replicas, in the format { 'name' => 'URL' }, or { 'URL' => 'name' } if the argument C is passed in. =head1 CONFIG VARIABLES The following config variables are currently used in various places in Prophet: .summary-format record.summary-format user.email-address alias. =head1 SEE ALSO Most of the useful methods for getting and setting configuration variables actually come from L. See that module's documentation for details. 
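As an illustration of the sections that C<aliases> and C<sources> read, a config file might contain entries like these (a hypothetical example; the alias text, replica name, and URL are made up):

    [alias]
        mine = search -- owner=me status!=closed

    [replica "work"]
        url = http://prophet.example.com/replica/

With this file, C<aliases> would return C<< ( mine => 'search -- owner=me status!=closed' ) >> and C<sources> would return C<< ( work => 'http://prophet.example.com/replica/' ) >>.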
prophet-0.750/lib/Prophet/Conflict.pm000066400000000000000000000211771160607302300175020ustar00rootroot00000000000000package Prophet::Conflict; use Any::Moose; use Params::Validate; use Prophet::ConflictingPropChange; use Prophet::ConflictingChange; has prophet_handle => ( is => 'rw', isa => 'Prophet::Replica', ); has resolvers => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, auto_deref => 1, ); has changeset => ( is => 'rw', isa => 'Prophet::ChangeSet', ); has nullification_changeset => ( is => 'rw', isa => 'Prophet::ChangeSet', ); has resolution_changeset => ( is => 'rw', isa => 'Prophet::ChangeSet', ); has autoresolved => ( is => 'rw', isa => 'Bool', ); has conflicting_changes => ( is => 'ro', isa => 'ArrayRef', default => sub { [] }, ); sub has_conflicting_changes { scalar @{ $_[0]->conflicting_changes } } sub add_conflicting_change { my $self = shift; push @{ $self->conflicting_changes }, @_; } =head2 analyze_changeset Prophet::ChangeSet Take a look at a changeset. if there are any conflicts, populate the L array on this object with a set of L objects. =cut sub analyze_changeset { my $self = shift; #my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); $self->generate_changeset_conflicts(); return unless $self->has_conflicting_changes; $self->generate_nullification_changeset; return 1; } use Prophet::Resolver::IdenticalChanges; use Prophet::Resolver::FromResolutionDB; use Prophet::Resolver::Fixup::MissingSourceOldValues; use Prophet::Resolver::Failed; use Prophet::Resolver::Prompt; sub generate_resolution { my $self = shift; my $resdb = shift; my @resolvers = ( sub { Prophet::Resolver::IdenticalChanges->new->run(@_); }, $resdb ? sub { Prophet::Resolver::FromResolutionDB->new->run(@_) } : (), $self->resolvers, sub { Prophet::Resolver::Fixup::MissingSourceOldValues->new->run(@_)}, (-t STDIN && -t STDOUT) ? sub { Prophet::Resolver::Prompt->new->run(@_) } : (), sub { Prophet::Resolver::Failed->new->run(@_) }, ); my $resolutions = Prophet::ChangeSet->new({ creator => $self->prophet_handle->changeset_creator, is_resolution => 1, }); for my $conflicting_change ( @{ $self->conflicting_changes } ) { for (@resolvers) { if ( my $resolution = $_->( $conflicting_change, $self, $resdb ) ) { $resolutions->add_change( change => $resolution ) if $resolution->has_prop_changes; last; } } } $self->resolution_changeset($resolutions); return 1; } =head2 generate_changeset_conflicts Given a changeset, populates $self->conflicting_changes with all the conflicts that applying that changeset to the target replica would result in. =cut sub generate_changeset_conflicts { my $self = shift; for my $change ( $self->changeset->changes ) { if ( my $change_conflicts = $self->_generate_change_conflicts($change) ) { $self->add_conflicting_change($change_conflicts); } } } =head2 _generate_change_conflicts Prophet::Change Given a change, generates a set of Prophet::ConflictingChange entries. 
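Taken together with C<analyze_changeset> and C<generate_resolution> above, a caller typically drives conflict handling roughly like this (a conceptual sketch, not a literal excerpt; C<$replica>, C<$changeset>, and C<$resdb> are assumed to already exist):

    my $conflict = Prophet::Conflict->new(
        prophet_handle => $replica,
        changeset      => $changeset,
    );

    if ( $conflict->analyze_changeset ) {
        # conflicting_changes is now populated
        $conflict->generate_resolution($resdb);
    }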
=cut sub _generate_change_conflicts { my $self = shift; my ($change) = validate_pos( @_, { isa => "Prophet::Change" } ); my $file_op_conflict; my $file_exists = $self->prophet_handle->record_exists( uuid => $change->record_uuid, type => $change->record_type ); # It's ok to delete a record that exists if ( $change->change_type eq 'delete' && !$file_exists ) { $file_op_conflict = "delete_missing_file"; } elsif ( $change->change_type eq 'update_file' && !$file_exists ) { $file_op_conflict = "update_missing_file"; } elsif ( $change->change_type eq 'add_file' && $file_exists ) { # we can recover from "Trying to add a file which exists" by converting it to an "update file" # operation. This should ONLY ever happen on settings conflicts $change->change_type('update_file'); } elsif ( $change->change_type eq 'add_dir' && $file_exists ) { # XXX TODO: this isn't right $file_op_conflict = "create_existing_dir"; } my $change_conflict = Prophet::ConflictingChange->new( { record_type => $change->record_type, record_uuid => $change->record_uuid, target_record_exists => ($file_exists ? 1 : 0 ), change_type => $change->change_type, $file_op_conflict ? ( file_op_conflict => $file_op_conflict ) : (), } ); if ($file_exists) { my $current_state = $self->prophet_handle->get_record_props( uuid => $change->record_uuid, type => $change->record_type ); $change_conflict->add_prop_conflict( $self->_generate_prop_change_conflicts( $change, $current_state ) ); } return ( $change_conflict->has_prop_conflicts || $file_op_conflict ) ? $change_conflict : undef; } =head2 _generate_prop_change_conflicts Prophet::Change %hash_of_current_properties Given a change and the current state of a record, returns an array of Prophet::ConflictingPropChange objects describing conflicts which would occur if the change were applied =cut sub _generate_prop_change_conflicts { my $self = shift; my $change = shift; my $current_state = shift; my @prop_conflicts; for my $prop_change ( $change->prop_changes ) { # skip properties added by the change next if ( !defined $current_state->{ $prop_change->name } && !defined $prop_change->old_value ); # If either the old version didn't have a value or the delta didn't have a value, then we know there's a conflict my $s = { name => $prop_change->name, source_old_value => $prop_change->old_value, target_value => $current_state->{ $prop_change->name }, source_new_value => $prop_change->new_value }; my $old_exists = ( defined $prop_change->old_value && $prop_change->old_value ne '' ) ? 1 : 0; my $current_exists = exists $current_state->{ $prop_change->name } ? 1 : 0; no warnings 'uninitialized'; if ( ( $current_exists != $old_exists) || ( $current_state->{ $prop_change->name } ne $prop_change->old_value ) ) { push @prop_conflicts, Prophet::ConflictingPropChange->new($s); } } return @prop_conflicts; } =head2 generate_nullification_changeset In order to record a changeset which might not apply cleanly to the current state of a replica, Prophet generates a I. That is, a changeset which sets the state of the replica back to what it needs to be in order to apply the new changeset. This routine computes a new L which contains everything needed to nullify the conflicting state of the replica. 
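Conceptually, integration then applies three changesets in order: the nullification changeset, the incoming changeset, and the resolution changeset (a sketch of the idea only, not a literal excerpt from the integration code):

    $replica->record_changes( $conflict->nullification_changeset );  # rewind conflicting state
    $replica->record_changes( $changeset );                          # apply the incoming changes
    $replica->record_changes( $conflict->resolution_changeset );     # apply computed resolutions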
=cut sub generate_nullification_changeset { my $self = shift; my $nullification = Prophet::ChangeSet->new({ is_nullification => 1, creator => undef, created => undef, }); for my $conflict ( @{ $self->conflicting_changes } ) { my $nullify_conflict = Prophet::Change->new( { record_type => $conflict->record_type, record_uuid => $conflict->record_uuid } ); my $file_op_conflict = $conflict->file_op_conflict || ''; if ( $file_op_conflict eq "delete_missing_file" ) { $nullify_conflict->change_type('add_file'); } elsif ( $file_op_conflict eq "update_missing_file" ) { $nullify_conflict->change_type('add_file'); } elsif ( $file_op_conflict eq "create_existing_file" ) { $nullify_conflict->change_type('delete'); } elsif ( $file_op_conflict ) { die "We don't know how to deal with a conflict of type " . $conflict->file_op_conflict; } else { $nullify_conflict->change_type('update_file'); } # now that we've sorted out all the file-level conflicts, we need to get properties in order for my $prop_conflict ( @{ $conflict->prop_conflicts } ) { $nullify_conflict->add_prop_change( name => $prop_conflict->name, old => $prop_conflict->target_value, new => $prop_conflict->source_old_value ); } $nullification->add_change( change => $nullify_conflict ); } $self->nullification_changeset($nullification); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/ConflictingChange.pm000066400000000000000000000031751160607302300213040ustar00rootroot00000000000000package Prophet::ConflictingChange; use Any::Moose; use Prophet::Meta::Types; use Prophet::ConflictingPropChange; use JSON 'to_json'; use Digest::SHA 'sha1_hex'; has record_type => ( is => 'rw', isa => 'Str', ); has record_uuid => ( is => 'rw', isa => 'Str', ); has source_record_exists => ( is => 'rw', isa => 'Bool', ); has target_record_exists => ( is => 'rw', isa => 'Bool', ); has change_type => ( is => 'rw', isa => 'Prophet::Type::ChangeType', ); has file_op_conflict => ( is => 'rw', isa => 'Prophet::Type::FileOpConflict', ); has prop_conflicts => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, ); sub has_prop_conflicts { scalar @{ $_[0]->prop_conflicts } } sub add_prop_conflict { my $self = shift; push @{ $self->prop_conflicts }, @_; } sub as_hash { my $self = shift; my $struct = { map { $_ => $self->$_() } ( qw/record_type record_uuid source_record_exists target_record_exists change_type file_op_conflict/ ) }; for ( @{ $self->prop_conflicts } ) { push @{ $struct->{'prop_conflicts'} }, $_->as_hash; } return $struct; } =head2 fingerprint Returns a fingerprint of the content of this conflicting change =cut sub fingerprint { my $self = shift; my $struct = $self->as_hash; for ( @{ $struct->{prop_conflicts} } ) { $_->{choices} = [ sort grep { defined} ( delete $_->{source_new_value}, delete $_->{target_value} ) ]; } return sha1_hex(to_json($struct, {utf8 => 1, canonical => 1})); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/ConflictingPropChange.pm000066400000000000000000000022471160607302300221440ustar00rootroot00000000000000package Prophet::ConflictingPropChange; use Any::Moose; has name => ( is => 'rw', isa => 'Str', ); has source_old_value => ( is => 'rw', isa => 'Str|Undef', ); has target_value => ( is => 'rw', isa => 'Str|Undef', ); has source_new_value => ( is => 'rw', isa => 'Str|Undef', ); =head1 NAME Prophet::ConflictingPropChange =head1 DESCRIPTION Objects of this class describe a case when the a property change can not be cleanly applied to a replica because the old value for the 
property locally did not match the "begin state" of the change being applied. =head1 METHODS =head2 name The property name for the conflict in question =head2 source_old_value The inital (old) state from the change being merged in =head2 source_new_value The final (new) state of the property from the change being merged in. =head2 target_value The current target-replica value of the property being merged. =cut sub as_hash { my $self = shift; my $hashref = {}; for (qw(name source_old_value target_value source_new_value)) { $hashref->{$_} = $self->$_ } return $hashref; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/ContentAddressedStore.pm000066400000000000000000000015751160607302300222070ustar00rootroot00000000000000package Prophet::ContentAddressedStore; use Any::Moose; use JSON; use Digest::SHA qw(sha1_hex); has fs_root => ( is => 'rw', ); has root => ( isa => 'Str', is => 'rw', ); sub write { my ($self, $content) = @_; $content = $$content if ref($content) eq 'SCALAR'; $content = to_json( $content, { canonical => 1, pretty => 0, utf8 => 1 } ) if ref($content); my $fingerprint = sha1_hex($content); Prophet::Util->write_file( file => $self->filename($fingerprint, 1), content => $content ); return $fingerprint; } sub filename { my ($self, $key, $full) = @_; Prophet::Util->catfile( $full ? $self->fs_root : (), $self->root => Prophet::Util::hashed_dir_name($key) ); } __PACKAGE__->meta->make_immutable(); no Any::Moose; 1; prophet-0.750/lib/Prophet/DatabaseSetting.pm000066400000000000000000000027471160607302300210050ustar00rootroot00000000000000package Prophet::DatabaseSetting; use Any::Moose; extends 'Prophet::Record'; use Params::Validate; use JSON; has default => ( is => 'ro', ); has label => ( isa => 'Str|Undef', is => 'rw', ); has '+type' => ( default => '__prophet_db_settings' ); sub BUILD { my $self = shift; $self->initialize unless ($self->handle->record_exists(uuid => $self->uuid, type => $self->type) ); } sub initialize { my $self = shift; $self->set($self->default); } sub set { my $self = shift; my $entry; if (exists $_[1] || !ref($_[0])) { $entry = [@_]; } else { $entry = shift @_; } my $content = to_json($entry, { canonical => 1, pretty => 0, utf8 => 1, allow_nonref => 0, }); my %props = ( content => $content, label => $self->label, ); if ($self->handle->record_exists( uuid => $self->uuid, type => $self->type)) { $self->set_props(props => \%props); } else { $self->_create_record( uuid => $self->uuid, props => \%props, ); } } sub get_raw { my $self = shift; my $content = $self->prop('content'); return $content; } sub get { my $self = shift; $self->initialize() unless $self->load(uuid => $self->uuid); my $content = $self->get_raw; my $entry = from_json($content, { utf8 => 1 }); return $entry; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/FilesystemReplica.pm000066400000000000000000000213741160607302300213640ustar00rootroot00000000000000package Prophet::FilesystemReplica; use Any::Moose; extends 'Prophet::Replica'; use File::Spec;use Params::Validate qw(:all); use JSON; use Prophet::Util; =head2 replica_exists Returns true if the replica already exists / has been initialized. Returns false otherwise. =cut sub replica_exists { my $self = shift; return $self->uuid ? 
1 : 0; } sub can_initialize { my $self = shift; if ( $self->fs_root_parent && -w $self->fs_root_parent ) { return 1; } return 0; } =head2 _file_exists PATH Returns true if PATH is a file or directory in this replica's directory structure =cut sub _file_exists { my $self = shift; my ($file) = validate_pos( @_, 1 ); return $self->backend->file_exists($file); } sub read_file { my $self = shift; my ($file) = validate_pos( @_, 1 ); if ( $self->fs_root ) { # make sure we don't try to read files outside the replica my $qualified_file = Cwd::fast_abs_path( Prophet::Util->catfile( $self->fs_root => $file ) ); return undef if substr( $qualified_file, 0, length( $self->fs_root ) ) ne $self->fs_root; } return $self->_read_file($file); } sub _read_file { my $self = shift; my ($file) = (@_); # validation is too heavy to be called here #my ($file) = validate_pos( @_, 1 ); $self->backend->read_file($file); } sub _write_file { my $self = shift; my %args = (@_); # validate is too heavy to be called here # my %args = validate( @_, { path => 1, content => 1 } ); $self->backend->write_file(%args); } sub read_changeset_index { my $self= shift; $self->log_debug( "Reading changeset index file '" .$self->changeset_index . "'" ); my $chgidx = $self->_read_file( $self->changeset_index ); return \$chgidx; } sub _write_changeset { my $self = shift; my %args = validate( @_, { changeset => { isa => 'Prophet::ChangeSet' } } ); my $changeset = $args{'changeset'}; my $hash_changeset = $changeset->as_hash; # These two things should never actually get stored my $seqno = delete $hash_changeset->{'sequence_no'}; my $uuid = delete $hash_changeset->{'source_uuid'}; my $cas_key = $self->changeset_cas->write( $hash_changeset ); my $changeset_index_line = pack( 'Na16NH40', $seqno, $self->uuid_generator->from_string( $changeset->original_source_uuid ), $changeset->original_sequence_no, $cas_key ); $self->backend->append_to_file($self->changeset_index => $changeset_index_line); } use constant CHG_RECORD_SIZE => ( 4 + 16 + 4 + 20 ); sub _changeset_index_size { my $self = shift; my %args = validate( @_, { index_file => 1 } ); return length(${$args{index_file}})/CHG_RECORD_SIZE; } =head2 traverse_changesets { after => SEQUENCE_NO, callback => sub { } } Walks through all changesets from $after to $until, calling $callback on each. If no $until is specified, the latest changeset is assumed. =cut # each record is : local-replica-seq-no : original-uuid : original-seq-no : cas key # 4 16 4 20 sub traverse_changesets { my $self = shift; my %args = validate( @_, { after => 1, callback => { type => CODEREF }, before_load_changeset_callback => { type => CODEREF, optional => 1 }, reporting_callback => { type => CODEREF, optional => 1 }, until => 0, reverse => 0, load_changesets => { default => 1 } } ); my $first_rev = ( $args{'after'} + 1 ) || 1; my $latest = $self->latest_sequence_no || 0; if ( defined $args{until} && $args{until} < $latest ) { $latest = $args{until}; } #there's no need to iterate if we know there's nothing to read return if ( $first_rev > $latest); $self->log_debug("Traversing changesets between $first_rev and $latest"); my @range = ( $first_rev .. 
$latest ); @range = reverse @range if $args{reverse}; my $chgidx = $self->read_changeset_index; for my $rev (@range) { $self->log_debug("Fetching changeset $rev"); if ( $args{'before_load_changeset_callback'} ) { my $continue = $args{'before_load_changeset_callback'}->( changeset_metadata => $self->_changeset_index_entry( sequence_no => $rev, index_file => $chgidx ) ); next unless $continue; } my $data; if ( $args{load_changesets} ) { $data = $self->_get_changeset_via_index( sequence_no => $rev, index_file => $chgidx ); $args{callback}->( changeset => $data ); } else { $data = $self->_changeset_index_entry( sequence_no => $rev, index_file => $chgidx ); $args{callback}->( changeset_metadata => $data ); } $args{reporting_callback}->($data) if ( $args{reporting_callback} ); } } sub _changeset_index_entry { my $self = shift; my %args = validate( @_, { sequence_no => 1, index_file => 1 } ); my $chgidx = $args{index_file}; my $rev = $args{'sequence_no'}; my $index_record = substr( $$chgidx, ( $rev - 1 ) * CHG_RECORD_SIZE, CHG_RECORD_SIZE ); my ( $seq, $orig_uuid, $orig_seq, $key ) = unpack( 'Na16NH40', $index_record ); $orig_uuid = $self->uuid_generator->to_string($orig_uuid); $self->log_debug( "REV: $rev - seq $seq - originally $orig_seq from " . substr( $orig_uuid, 0, 6 ) . " data key $key" ); return [ $seq, $orig_uuid, $orig_seq, $key]; } sub _deserialize_changeset { my $self = shift; my %args = validate( @_, { content => 1, original_sequence_no => 1, original_source_uuid => 1, sequence_no => 1 } ); require Prophet::ChangeSet; my $content_struct = from_json( $args{content}, { utf8 => 1 } ); my $changeset = Prophet::ChangeSet->new_from_hashref($content_struct); $changeset->source_uuid( $self->uuid ); $changeset->sequence_no( $args{'sequence_no'} ); $changeset->original_source_uuid( $args{'original_source_uuid'} ); $changeset->original_sequence_no( $args{'original_sequence_no'} ); return $changeset; } sub _get_changeset_via_index { my $self = shift; my %args = validate( @_, { sequence_no => 1, index_file => 1 } ); # XXX: deserialize the changeset content from the cas with $key my ( $seq, $orig_uuid, $orig_seq, $key ) =@{ $self->_changeset_index_entry(%args)}; my $changeset = $self->_deserialize_changeset( content => $self->fetch_serialized_changeset(sha1 => $key), original_source_uuid => $orig_uuid, original_sequence_no => $orig_seq, sequence_no => $seq ); return $changeset; } sub fetch_serialized_changeset { my $self = shift; my %args = validate(@_, { sha1 => 1 }); my $casfile = $self->changeset_cas->filename($args{sha1}); return $self->_read_file($casfile); } =head2 read_userdata_file Returns the contents of the given file in this replica's userdata directory. Returns C if the file does not exist. =cut sub read_userdata { my $self = shift; my %args = validate( @_, { path => 1 } ); $self->_read_file( Prophet::Util->catfile( $self->userdata_dir, $args{path} ) ); } =head2 write_userdata Writes the given string to the given file in this replica's userdata directory. 
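For example (a minimal sketch; the file name and content are arbitrary):

    $replica->write_userdata( path => 'last-export', content => time() );
    my $stamp = $replica->read_userdata( path => 'last-export' );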
=cut sub write_userdata { my $self = shift; my %args = validate( @_, { path => 1, content => 1 } ); $self->_write_file( path => Prophet::Util->catfile( $self->userdata_dir, $args{path} ), content => $args{content}, ); } sub store_local_metadata { my $self = shift; my $key = shift; my $value = shift; $self->_write_file( path =>Prophet::Util->catfile( $self->local_metadata_dir, lc($key)), content => $value, ); } sub fetch_local_metadata { my $self = shift; my $key = shift; # local metadata files used to (incorrectly) be treated as case sensitive. # The code below tries to make sure that we don't lose historical data as we fix this # If there's a new-style all-lowercase file, read that first. If there isn't, # try to read an old-style sensitive file my $insensitive_file = Prophet::Util->catfile($self->local_metadata_dir, lc($key)); my $sensitive_file = Prophet::Util->catfile($self->local_metadata_dir, $key); return $self->_read_file($insensitive_file) || $self->_read_file($sensitive_file); } no Any::Moose; 1; prophet-0.750/lib/Prophet/ForeignReplica.pm000066400000000000000000000106541160607302300206300ustar00rootroot00000000000000package Prophet::ForeignReplica; use Any::Moose; use Params::Validate qw(:all); extends 'Prophet::Replica'; =head1 NAME Prophet::ForeignReplica =head1 DESCRIPTION This abstract baseclass implements the helpers you need to be able to easily sync a prophet replica with a "second class citizen" replica which can't exactly reconstruct changesets, doesn't use uuids to track records and so on. =head1 METHODS =cut sub fetch_local_metadata { my $self = shift; my $key = shift; return $self->app_handle->handle->fetch_local_metadata( $self->uuid . "-".$key ); } sub store_local_metadata { my $self = shift; my $key = shift; my $value = shift; return $self->app_handle->handle->store_local_metadata( $self->uuid."-".$key => $value); } sub conflicts_from_changeset { return; } sub can_write_changesets {1} sub record_resolutions { die "Resolution handling is not for foreign replicas"; } sub import_resolutions_from_remote_source { warn 'resdb not implemented yet'; return; } =head2 record_changes L Integrate all changes in this changeset. =cut sub record_changes { my $self = shift; my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); $self->integrate_changes($changeset); } # XXX TODO = or do these ~always stay stubbed? sub begin_edit { } sub commit_edit { } # foreign replicas never have a db uuid sub db_uuid { return undef } sub uuid_for_url { my ( $self, $url ) = @_; return $self->uuid_generator->create_string_from_url( $url ); } =head2 prompt_for_login Interactively prompt the user for a username and an authentication secret (usually a password). Named parameters: uri username password username_prompt secret_prompt To use the default prompts, which ask for a username and password, pass in C and (optionally) C. Either prompt will be skipped if a value is passed in to begin, making this suitable for use in a login loop that prompts for values and then tests that they work for authentication, looping around if they don't. You can also override the default prompts by passing in subroutines for C and/or C. These subroutines return strings to be printed and are called like this: username_prompt( uri ) secret_prompt( uri, username ) Where C and C are the args that are passed in under those names (if any). You don't need to use them; use a closure if you want something else. 
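For instance, a foreign-replica subclass might call it like this (a sketch; the URI and prompt text are made up):

    my ( $username, $password ) = $self->prompt_for_login(
        uri           => 'http://rt.example.com',
        username      => $stored_username,    # skips the username prompt if already defined
        secret_prompt => sub {
            my ( $uri, $username ) = @_;
            return "RT password for $username at $uri: ";
        },
    );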
=cut sub prompt_for_login { my $self = shift; my %args = ( uri => undef, username => undef, password => undef, secret_prompt => sub { my ($uri, $username) = @_; return "Password for $username: @ $uri: "; }, username_prompt => sub { my ($uri) = shift; return "Username for ${uri}: "; }, @_, ); # check if username and password are in config my $replica_username_key = 'replica.' . $self->scheme .":" . $self->{url} . '.username'; my $replica_token_key = 'replica.' . $self->scheme . ":" . $self->{url} . '.secret_token'; if ( !$args{username} ) { my $check_username = $self->app_handle->config->get( key => $replica_username_key ); $args{username} = $check_username if $check_username; } my $was_in_pager = Prophet::CLI->in_pager(); Prophet::CLI->end_pager(); # XXX belongs to some CLI callback use Term::ReadKey; local $| = 1; unless ($args{username}) { print $args{username_prompt}($args{uri}); ReadMode 1; chomp( $args{username} = ReadLine 0 ); } if ( my $check_password = $self->app_handle->config->get( key => $replica_token_key ) ) { $args{password} = $check_password; } elsif ( !defined($args{password}) ) { print $args{secret_prompt}( $args{uri}, $args{username} ); ReadMode 2; chomp( $args{password} = ReadLine 0 ); ReadMode 1; print "\n"; } Prophet::CLI->start_pager() if ($was_in_pager); return ( $args{username}, $args{password} ); } sub log { my $self = shift; my ($msg) = validate_pos(@_, 1); Carp::confess unless ($self->app_handle); $self->app_handle->log($self->url.": " .$msg); } no Any::Moose; __PACKAGE__->meta->make_immutable; 1; prophet-0.750/lib/Prophet/Manual.pod000066400000000000000000000222451160607302300173210ustar00rootroot00000000000000=head1 NAME Prophet::Manual - What Prophet is, how it works and how to use it =head1 Introduction =head2 What is Prophet? Prophet is a new kind of database designed for the post Web-2.0 world. It's made to let you collaborate with your friends and coworkers without needing any kind of special server or Internet provider. Prophet's buzzword-laden pitch reads something like this: A grounded, semirelational, peer to peer replicated, disconnected, versioned, property database with self-healing conflict resolution. Here is a slideshow describing why Prophet came about: L =head2 How does it work? There are two ways to create a Prophet database: cloning and initing One way is to clone an existing database. When you clone an existing database, a local replica is created for you with the uuid of the cloned database. Another way is to init a database. This creates a database with a new uuid. Anyone who clones from this database will get a replica of the data and share the database uuid. Note that Prophet will prevent you from merging databases unless they have the same database uuid (although you can force the merge of different databases if you want). A Prophet database is composed of records, each of which has several properties. Two core properties are C and C. A record's C indicates the kind of record (comment, ticket, user, etc.) and the C of the record uniquely identifies it so that it can be referenced elsewhere. Another core property is a record's C, which is a shorthand identifier used for local identification. For example: # Instead of specifying the uuid ticket show e4e5f9d8-ff7a-40c1-8c7f-2d6fcdd859ed # ...you can use the luid ticket show 9 =head2 Record overview =head3 The record object (Prophet::Record) A record object in Prophet is initially an empty husk. First, the record class is found. 
The default record class is L, but designating a custom class, one that extends from L is possible. Once the record class is found, the object is instantiated and passed the app_handle, handle, and type of the record. The record is now ready for use. =head3 Loading a record from the database Once you have a record object configured with a type and uuid, you can load data from the replica. This consists of asking the replica (handle) for the properties corresponding to the given record type and uuid. Currently, the record object does not actually store any data. Rather, it acts as a proxy to the replica. =head3 Saving a record to the database There is no save method corresponding to load. Properties are immediately saved to the replica once they are set. Before properties are sent to the replica, the record object is responsible for canonicalizing and validating them. =head2 Definining a property: declaring, defaulting, and recommending You can declare properties for a record by defining a C routine for a record. The routine should return a list of properties declared for the record type. Don't forget to return inherited properties! Here is an example: sub declared_props { return ('email', shift->SUPER::declared_props(@_)) } Prophet knows how to default a property by looking for a C method in the record class. If it finds one, it will pass the properties (not just the property to be defaulted) through in the form of a hashref. The returned value is the default value for the property. The default method is NOT triggered if the property value is already defined (not undef) Generating property defaults takes place during record creation. You can also recommend values for a property. Recommending values for a property is mainly used for validation. To recommend values for a property, define a C<_recommended_values_for_prop_$prop> routine in your record class. The routine should return a list of which is the range of values for the property. Here is an example of how SD uses value recommending to validate: # A globally defined "statuses" setting is specified in App::SD sub database_settings { ... statuses => ['24183C4D-EFD0-4B16-A207-ED7598E875E6' => qw/new open stalled closed rejected/], ... } ... # App::SD::Model::Ticket uses the "statuses" setting for recommended values sub _recommended_values_for_prop_status { return @{ shift->app_handle->setting( label => 'statuses' )->get() }; } ... # App::SD::Model::Ticket uses the recommended values for "status" to validate sub validate_prop_status { my ( $self, %args ) = @_; return $self->validate_prop_from_recommended_values( 'status', \%args ); } =head2 Property canonicalization Property canonicalization makes sure a property is in the right format. It includes trimmming leading and trailing whitespace, making sure text is in the right case, and more. Prophet knows how to canonicalize a property by looking for a C method in the record class. If it finds one, it will pass the properties (not just the named property) through in the form of a hashref to be canonicalized. =head2 Property validation Property validation makes sure a property has a valid value before committing it to the replica. Prophet knows how to validate a property by looking for a C method in the record class. If it finds one, it will pass the properties (not just the named property) through in the form of a hashref to be validated. If the validation routine makes note of an error, Prophet will abort with an exception (die). 
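As a concrete illustration, a record class might normalize and then check a hypothetical C<email> property like this (a sketch, not code taken from Prophet or SD):

    # in a class that extends Prophet::Record
    sub canonicalize_prop_email {
        my ( $self, %args ) = @_;
        return unless defined $args{props}{email};
        $args{props}{email} = lc $args{props}{email};    # fold case, in place
    }

    sub validate_prop_email {
        my ( $self, %args ) = @_;
        return 1 if ( $args{props}{email} || '' ) =~ /@/;
        $args{errors}{email} = 'does not look like an email address';
        return 0;
    }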
You can also ask Prophet to validate a property based on recommended values. =head1 Glossary =head2 Record A record is a collection of properties (much like an SQL table is a collection of columns). A record must have a type and uuid. It may also have an luid, which is like a uuid but only valid for the local environment/replica. =head2 Property A property is a name/value pair associated with a record. =head2 Collection A collection is used to search for and operate on records matching certain criteria. =head2 Replica (WIP) The database that a Prophet application works from. The local state of all the data. Alice keeps her most recent fetch of the database in her replica. The global state of all the data. The latest data that Alice and Bob have committed are in the database. =head1 FAQ =head2 Why doesn't Prophet use git or svn to track changes? The short answer: "The way you want to handle changes in a Bbase (for source code) are very different than the way you want to handle changes in a Bbase (for records and properties)" =head2 Does Prophet currently do sub-property (content-level) diffing? No it does not... yet. However, the conflict resolution in Prophet is pluggable, so it's a possibility =head2 What do C and C refer to? What's the difference between them? C is a reference to your application object, like an instance of C that extends from C If you're familliar with Catalyst, you'll recognize it as being similar to the C<$catalyst> instance C is a reference to your repository "database", depending on what kind of Replica you're using If you're familliar with DBI, you'll recognize it as the database handle that is returned when you connect to a database via C<< DBI->connect >> =head2 How is Prophet different from something like Google Gears or Adobe Air? While Gears and Air allow you to take cloud applications offline, they don't solve the data merging/synchronization problem. =head2 How does Prophet ensure that synchronized/shared data is valid? =cut 12:55 < grink> what is a good word for the datastore in Prophet? database? replica database? 12:55 < Sartak> database is the global state, replica is the local state 12:56 < Sartak> so you probably want just replica :) 12:56 < grink> sweeeeet 12:57 * grink edits out "storage-thingy" 13:15 < obra> local replica == local copy of the database 13:15 < obra> grink: sd is the main app on Prophet we're working on right now 13:17 < grink> isn't local replica redundant then? 13:18 < obra> not really 13:18 < obra> EVERY copy is a replica 13:18 < grink> or are you saying, "Alice's local state" and "Bob's local state" 13:18 < obra> my local replica is the one I'm working from 13:19 < grink> so the database is the theoretical merging of all the replicas, but no one would ever work from the "database" 13:19 < grink> you always interface through a replica 13:20 < obra> there isn't really "the database" 13:20 < obra> database is a word that describes any/all replicas. 13:20 < obra> every replica shares the same database uuid 13:20 < obra> so you don't merge your replica of the foo database into my replica of the bar database 13:21 < obra> it's kind of like cylons 13:21 < obra> there are many copies. 
13:21 < grink> lol 13:21 < obra> they're sort of interchangable 13:21 < obra> but each comes from the same prototype 13:21 < obra> and clones of the same kind can get inside each others' heads much more easily 13:21 < obra> (This is ~why SD's milestones are named for cylons) prophet-0.750/lib/Prophet/Meta/000077500000000000000000000000001160607302300162615ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Meta/Types.pm000066400000000000000000000010251160607302300177210ustar00rootroot00000000000000package Prophet::Meta::Types; use Any::Moose; use Any::Moose 'Util::TypeConstraints'; enum 'Prophet::Type::ChangeType' => qw/add_file add_dir update_file delete/; enum 'Prophet::Type::FileOpConflict' => qw/delete_missing_file update_missing_file create_existing_file create_existing_dir/; __PACKAGE__->meta->make_immutable; no Any::Moose; 1; __END__ =head1 NAME Prophet::Meta::Types - extra types for Prophet =head1 TYPES =head2 Prophet::Type::ChangeType A single change type: add_file, add_dir, update_file, delete. =cut prophet-0.750/lib/Prophet/PropChange.pm000066400000000000000000000016761160607302300177710ustar00rootroot00000000000000package Prophet::PropChange; use Any::Moose; has name => ( is => 'rw', isa => 'Str', ); has old_value => ( is => 'rw', isa => 'Str|Undef', ); has new_value => ( is => 'rw', isa => 'Str|Undef', ); =head1 NAME Prophet::PropChange =head1 DESCRIPTION This class encapsulates a single property change. =head1 METHODS =head2 name The name of the property we're talking about. =head2 old_value What L changed I. =head2 new_value What L changed I. =cut sub summary { my $self = shift; my $name = $self->name || '(property name missing)'; my $old = $self->old_value; my $new = $self->new_value; if (!defined($old)) { return qq{+ "$name" set to "}.($new||'').qq{"}; } elsif (!defined($new)) { return qq{- "$name" "$old" deleted.}; } return qq{> "$name" changed from "$old" to "$new".}; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Record.pm000066400000000000000000000550111160607302300171510ustar00rootroot00000000000000package Prophet::Record; use Any::Moose; use Params::Validate; use Term::ANSIColor; use Prophet::App; # for require_module. Kinda hacky use constant collection_class => 'Prophet::Collection'; =head1 NAME Prophet::Record =head1 DESCRIPTION This class represents a base class for any record in a Prophet database. =cut has app_handle => ( isa => 'Prophet::App|Undef', is => 'rw', required => 0, ); has handle => ( is => 'rw', required => 1, lazy => 1, default => sub { shift->app_handle->handle } ); has type => ( is => 'rw', isa => 'Str', predicate => 'has_type', required => 1, default => sub { undef} ); has uuid => ( is => 'rw', isa => 'Str', ); has luid => ( is => 'rw', isa => 'Str|Undef', lazy => 1, default => sub { my $self = shift; $self->find_or_create_luid; }, ); our $REFERENCES = {}; sub REFERENCES { $REFERENCES } our $PROPERTIES = {}; sub PROPERTIES { $PROPERTIES } =head1 METHODS =head2 new { handle => Prophet::Replica, type => $type } Instantiates a new, empty L of type $type. =head2 declared_props Returns a sorted list of the names of the record's declared properties. Declared properties are always validated even if the user provides no value for that prop. This can be used for such things as requiring records to have certain props in order to be created, for example. =cut sub declared_props { return sort keys %{ $_[0]->PROPERTIES }; } =head2 record_type Returns the record's type. 
=cut sub record_type { $_[0]->type } =head2 register_reference $class, $accessor, $foreign_class, @args Registers a reference to a foreign class to this record. The foreign class must be of type L or L, or else a fatal error is triggered. =cut sub register_reference { my ( $class, $accessor, $foreign_class, @args ) = @_; Prophet::App->require($foreign_class); if ( $foreign_class->isa('Prophet::Collection') ) { return $class->register_collection_reference( $accessor => $foreign_class, @args ); } elsif ( $foreign_class->isa('Prophet::Record') ) { return $class->register_record_reference( $accessor => $foreign_class, # default the lookup property to be the name of the accessor by => $accessor, @args ); } else { die "Your foreign class ($foreign_class) must be a subclass of Prophet::Record or Prophet::Collection"; } } =head2 register_collection_reference $accessor, $collection_class, by => $key_in_model Registers and creates an accessor in the current class to the associated collection C<$collection_class>, which refers to the current class by C<$key_in_model> in the model class of C<$collection_class>. =cut sub register_collection_reference { my ( $class, $accessor, $collection_class, @args ) = @_; my %args = validate( @args, { by => 1 } ); no strict 'refs'; Prophet::App->require( $collection_class->record_class ); *{ $class . "::$accessor" } = sub { my $self = shift; my $collection = $collection_class->new( app_handle => $self->app_handle, ); $collection->matching( sub { ($_[0]->prop( $args{by} )||'') eq $self->uuid } ); return $collection; }; # XXX: add validater for $args{by} in $model->record_class $class->REFERENCES->{$class}{$accessor} = { %args, arity => 'collection', type => $collection_class->record_class, }; } =head2 register_record_reference $accessor, $record_class, by => $key_in_model Registers and creates an accessor in the current class to the associated record C<$record_class>, which refers to the current class by C<$key_in_model> in the model class of C<$collection_class>. =cut sub register_record_reference { my ( $class, $accessor, $record_class, @args ) = @_; my %args = validate( @args, { by => 1 } ); no strict 'refs'; Prophet::App->require( $record_class ); *{ $class . "::$accessor" } = sub { my $self = shift; my $record = $record_class->new( app_handle => $self->app_handle, handle => $self->handle, ); $record->load(uuid => $self->prop($args{by})); return $record; }; # XXX: add validater for $args{by} in $model->record_class $class->REFERENCES->{$class}{$accessor} = { %args, arity => 'scalar', type => $record_class, }; } =head2 create { props => { %hash_of_kv_pairs } } Creates a new Prophet database record in your database. Sets the record's properties to the keys and values passed in. Automatically canonicalizes and then validates the props. Upon successful creation, returns the new record's C. In case of failure, returns undef. 
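For example (a sketch; the record class and property names are hypothetical):

    my $ticket = MyApp::Model::Ticket->new( app_handle => $app );
    my $uuid   = $ticket->create(
        props => { summary => 'Pet the cat', status => 'open' },
    );
    die "Record could not be created\n" unless $uuid;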
=cut sub create { my $self = shift; my %args = validate( @_, { props => 1 } ); my $uuid = $self->handle->uuid_generator->create_str; my $props = $args{props}; $self->default_props($props); $self->canonicalize_props($props); # XXX TODO - this should be a real exception return undef unless (keys %$props); $self->validate_props($props) or return undef; $self->_create_record(props => $props, uuid => $uuid); } # _create_record is a helper routine, used both by create and by databasesetting::create sub _create_record { my $self = shift; my %args = validate( @_, { props => 1, uuid => 1 } ); $self->uuid($args{uuid}); $self->handle->create_record( props => $args{'props'}, uuid => $self->uuid, type => $self->type ); return $self->uuid; } =head2 load { uuid => $UUID } or { luid => $UUID } Given a UUID or LUID, look up the LUID or UUID (the opposite of what was given) in the database. Set this record's LUID and UUID attributes, and return the LUID or UUID (whichever wasn't given in the method call). Returns undef if the record doesn't exist in the database. =cut sub load { my $self = shift; my %args = validate( @_, { uuid => { optional => 1, callbacks => { 'uuid or luid present' => sub { $_[0] || $_[1]->{luid} }, }, }, luid => { optional => 1, callbacks => { 'luid or uuid present' => sub { $_[0] || $_[1]->{uuid} }, }, }, } ); if ( $args{luid} ) { $self->luid( $args{luid} ); $self->uuid( $self->handle->find_uuid_by_luid( luid => $args{luid} ) ); return($self->uuid) if ($self->uuid); } else { $self->uuid( $args{uuid} ); $self->luid( $self->handle->find_or_create_luid( uuid => $args{uuid})); return($self->luid) if ($self->luid); } return undef; } # a private method to let collection search results instantiate records more quickly # (See Prophet::Replica::sqlite) sub _instantiate_from_hash { my $self = shift; my %args = ( uuid => undef, luid => undef, @_); # we might not have a luid cheaply (see the prophet filesys backend) $self->luid($args{'luid'}) if (defined $args{'luid'}); # We _Always_ have a luid $self->uuid($args{'uuid'}); # XXX TODO - expect props as well } sub loaded { my $self = shift; return $self->uuid ? 1 : 0; } =head2 set_prop { name => $name, value => $value } Updates the current record to set an individual property called C<$name> to C<$value> This is a convenience method around L. =cut sub set_prop { my $self = shift; my %args = validate( @_, { name => 1, value => 1 } ); my $props = { $args{'name'} => $args{'value'} }; $self->set_props( props => $props ); } =head2 set_props { props => { key1 => val1, key2 => val2} } Updates the current record to set all the keys contained in the C parameter to their associated values. Automatically canonicalizes and validates the props in question. In case of failure, returns false. On success, returns true. =cut sub set_props { my $self = shift; my %args = validate( @_, { props => 1 } ); confess "set_props called on a record that hasn't been loaded or created yet." if !$self->uuid; $self->canonicalize_props( $args{'props'} ); $self->validate_props( $args{'props'} ) || return undef; return 0 unless grep { defined } values %{$args{props}}; $self->handle->set_record_props( type => $self->type, uuid => $self->uuid, props => $args{'props'} ); return 1; } =head2 get_props Returns a hash of this record's properties as currently set in the database. =cut sub get_props { my $self = shift; confess "get_props called on a record that hasn't been loaded or created yet." 
if !$self->uuid; return $self->handle->get_record_props( uuid => $self->uuid, type => $self->type) || {}; } =head2 exists When called on a loaded record, returns true if the record exists and false if it does not. =cut sub exists { my $self = shift; return $self->handle->record_exists( uuid => $self->uuid, type => $self->type); } =head2 prop $name Returns the current value of the property C<$name> for this record. (This is a convenience method wrapped around L). =cut sub prop { my $self = shift; my $prop = shift; return $self->get_props->{$prop}; } =head2 delete_prop { name => $name } Deletes the current value for the property $name. (This is currently equivalent to setting the prop to ''.) =cut sub delete_prop { my $self = shift; my %args = validate( @_, { name => 1 } ); confess "delete_prop called on a record that hasn't been loaded or created yet." if !$self->uuid; $self->set_prop(name => $args{'name'}, value => ''); # $self->handle->delete_record_prop( # uuid => $self->uuid, # name => $args{'name'} # ); } =head2 delete Deletes this record from the database. (Note that it does _not_ purge historical versions of the record) =cut sub delete { my $self = shift; delete $self->{props}; $self->handle->delete_record( type => $self->type, uuid => $self->uuid ); } =head2 changesets { limit => $int } Returns an ordered list of changeset objects for all changesets containing changes to the record specified by this record object. Note that changesets may include changes to other records. If a limit is specified, this routine will only return that many changesets, starting from the changeset containing the record's creation. =cut sub changesets { my $self = shift; my %args = validate(@_, { limit => 0}); return $self->handle->changesets_for_record( uuid => $self->uuid, type => $self->type, $args{limit} ? (limit => $args{limit}) : () ); } =head2 changes Returns an ordered list of all the change objects that represent changes to the record specified by this record object. =cut sub changes { my $self = shift; my $uuid = $self->uuid; my @changesets = $self->changesets; return grep { $_->record_uuid eq $uuid } map { $_->changes } @changesets; } =head2 uniq @list The C function (taken from version 0.21). Returns a new list by stripping duplicate values in @list. The order of elements in the returned list is the same as in @list. In scalar context, returns the number of unique elements in @list. my @x = uniq 1, 1, 2, 2, 3, 5, 3, 4; # returns 1 2 3 5 4 my $x = uniq 1, 1, 2, 2, 3, 5, 3, 4; # returns 5 =cut sub uniq (@) { my %h; map { $h{$_}++ == 0 ? $_ : () } @_; } =head2 validate_props $propsref Takes a reference to a props hash and validates each prop in the hash or in the C attribute that has a validation routine (C). Dies if any prop fails validation. Returns true on success. Returns false if any prop is not allowable (prop name fails validation). =cut sub validate_props { my $self = shift; my $props = shift; my $errors = {}; my @errors; for my $key ( uniq( keys %$props, $self->declared_props ) ) { return undef unless ( $self->_validate_prop_name($key) ); if ( my $sub = $self->can( 'validate_prop_' . $key ) ) { $sub->( $self, props => $props, errors => $errors ) || push @errors,"Validation error for '$key': " . ( $errors->{$key} || '' ) . '.'; } } if (@errors) { die join( "\n", @errors )."\n"; } return 1; } =head2 _validate_prop_name A hook to allow forcing users to only use certain prop names. Currently just returns true for all inputs. 
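A subclass could override this hook to restrict records to their declared properties, for example (a hypothetical sketch):

    sub _validate_prop_name {
        my ( $self, $name ) = @_;
        my %allowed = map { $_ => 1 } $self->declared_props;
        return $allowed{$name} ? 1 : 0;
    }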
=cut sub _validate_prop_name {1} =head2 canonicalize_props $propsref Takes a hashref to a props hash and canonicalizes each one if a C routine is available. Returns true on completion. =cut sub canonicalize_props { my $self = shift; my $props = shift; my $errors = {}; for my $key ( uniq( keys %$props, $self->declared_props ) ) { $self->canonicalize_prop($key, $props, $errors); } return 1; } sub canonicalize_prop { my $self = shift; my $prop = shift; my $props = shift; my $errors = shift; if ( my $sub = $self->can( 'canonicalize_prop_' . $prop ) ) { $sub->( $self, props => $props, errors => $errors ); return 1; } return 0; } =head2 default_props $props_ref Takes a reference to a hash of props and looks up the defaults for those props, if they exist (by way of C routines). Sets the values of the props in the hash to the defaults. =cut sub default_props { my $self = shift; my $props = shift; my @methods = grep { /^default_prop_/ } $self->meta->get_all_method_names; for my $method (@methods) { my ($key) = $method =~ /^default_prop_(.+)$/; $props->{$key} = $self->$method(props => $props) if !defined($props->{$key}); } return 1; } =head2 default_prop_creator Default the creator of every record to the changeset_creator (usually the current user's email address.) =cut sub default_prop_creator { my $self = shift; return $self->handle->changeset_creator; } =head2 default_prop_original_replica Default the original_replica of every record to the replica's uuid. =cut sub default_prop_original_replica { my $self = shift; return $self->handle->uuid; } =head2 validate_prop_from_recommended_values 'prop', $argsref Checks to see if the given property has a valid value and returns true if so. If not, adds an error message to $argsref->{errors}{prop} and returns false. =cut sub validate_prop_from_recommended_values { my $self = shift; my $prop = shift; my $args = shift; if ( my @options = $self->recommended_values_for_prop($prop) ) { return 1 if ((scalar grep { $args->{props}{$prop} eq $_ } @options) # force-set props with ! to bypass validation || $args->{props}{$prop} =~ s/!$//); $args->{errors}{$prop} = "'" . $args->{props}->{$prop} . "' is not a valid $prop"; return 0; } return 1; } =head2 recommended_values_for_prop 'prop' Given a record property, return an array of the values that should usually be associated with this property. If a property doesn't have a specific range of values, undef is returned. This is mainly intended for use in prop validation (see L). Recommended values for a prop are set by defining methods called C<_recommended_values_for_prop_$prop> in application modules that inherit from L. =cut sub recommended_values_for_prop { my $self = shift; my $prop = shift; if (my $code = $self->can("_recommended_values_for_prop_".$prop)) { $code->($self, @_); } else { return undef; } } =head2 _default_summary_format A string of the default summary format for record types that do not define their own summary format. A summary format should consist of format_string,field pairs, separated by | characters. Fields that are not property names must start with the C<$> character and be handled in the C routine. Example: C<'%s,$luid | %s,summary | %s,status'> =cut sub _default_summary_format { undef } =head2 _summary_format Tries to find the summary format for the record type. Returns L<_default_summary_format> if nothing better can be found. 
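Concretely, the lookup consults a type-specific key first, then the generic key, then the built-in default (a hypothetical config excerpt for a C<ticket> record type):

    [ticket]
        summary-format = %4s,$luid | %-60s,summary | %s,status

    [record]
        summary-format = %s,$luid | %s,summary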
=cut sub _summary_format { my $self = shift; return $self->app_handle->config->get( key => $self->type.'.summary-format' ) || $self->app_handle->config->get( key => 'record.summary-format' ) || $self->_default_summary_format; } =head2 _atomize_summary_format [$format] Splits a summary format into pieces (separated by arbitrary whitespace and the | character). Returns the split list. If no summary format is supplied, this routine attempts to find one by calling L<_summary_format>. =cut sub _atomize_summary_format { my $self = shift; my $format = shift || $self->_summary_format; return undef unless $format; return split /\s*\|\s*/, $format; } =head2 _parse_format_summary Parses the summary format for this record's type (or the default summary format if no type-specific format exists). Returns a list of hashrefs to hashes which contain the following keys: C, C, C, and C (These are the format string, the property to be formatted, the value of that property, and the atom formatted according to C, respectively.) If no format string is supplied in a given format atom, C<%s> is used. If a format atom C<$value>'s value does not start with a C<$> character, it is swapped with the value of the prop C<$value> (or the string "(no value)". All values are filtered through the function C. =cut sub _parse_format_summary { my $self = shift; my $props = $self->get_props; my @out; for my $atom ($self->_atomize_summary_format) { my %atom_data; my ($format, $prop, $value, $color); if ($atom =~ /,/) { ($format, $prop, $color) = split /,/, $atom; $value = $prop; unless ($value =~ /^\$/) { $value = $props->{$value} || "-" } } else { $format = '%s'; $prop = $value = $atom; } my $atom_value = $self->atom_value($value); push @out, { format => $format, prop => $prop, value => $atom_value, formatted => $self->format_atom( $format, $atom_value, $color ), }; } return @out; } =head2 format_summary Returns a formatted string that is the summary for the record. In an array context, returns a list of =cut sub format_summary { my $self = shift; my @out = $self->_summary_format ? $self->_parse_format_summary : $self->_format_all_props_raw; return @out if wantarray; return join ' ', map { $_->{formatted} } @out; } sub _format_all_props_raw { my $self = shift; my $props = $self->get_props; my @out; push @out, { prop => 'uuid', value => $self->uuid, format => '%s', formatted => "'uuid': '" . $self->uuid . "'" }; push @out, { prop => 'luid', value => $self->luid, format => '%s', formatted => "'luid': '" . $self->luid . "'" }; for my $prop ( keys %$props ) { push @out, { prop => $prop, value => $props->{$prop}, format => '%s', formatted => "'$prop': '" . $props->{$prop} . "'" }; } return @out; } =head2 atom_value $value_in Takes an input value from a summary format atom and returns either its output value or itself (because it is a property and its value should be retrieved from the props attribute instead). For example, an input value of "$uuid" would return the record object's C field. =cut sub atom_value { my $self = shift; my $value_in = shift || ''; if ($value_in =~ /^\$[gu]uid/) { return $self->uuid; } elsif ($value_in eq '$luid') { return $self->luid; } return $value_in; } =head2 format_atom $string => $value Takes a format string / value pair and returns a formatted string for printing. Dies with a message if there's an error in the format string that sprintf warn()s on. 
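For example (a sketch; note that plain values are used verbatim, while C<$luid> and C<$uuid> are resolved through C<atom_value>):

    my $id_cell     = $record->format_atom( '%4s'   => '$luid' );
    my $status_cell = $record->format_atom( '%-10s' => 'open', 'green' );  # colored via Term::ANSIColor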
=cut sub format_atom { my ($self, $string, $value, $color) = @_; my $formatted_atom; eval { use warnings FATAL => 'all'; # sprintf only warns on errors $formatted_atom = sprintf($string, $self->atom_value($value)); }; if ( $@ ) { chomp $@; die "Error: cannot format value '".$self->atom_value($value) ."' using atom '".$string."' in '".$self->type."' summary format\n\n" ."Check that the ".$self->type.".summary-format config variable in your config\n" ."file is valid. If this variable is not set, this is a bug in the default\n" ."summary format for this ticket type.\n\n" ."The error encountered was:\n\n'" . $@ . "'\n"; } return $color ? colored($formatted_atom, $color) : $formatted_atom; } =head2 find_or_create_luid Finds the luid for the records uuid, or creates a new one. Returns the luid. =cut sub find_or_create_luid { my $self = shift; my $luid = $self->handle->find_or_create_luid( uuid => $self->uuid ); $self->luid($luid); return $luid; } =head2 history_as_string Returns this record's changesets as a single string. =cut sub history_as_string { my $self = shift; my $out =''; for my $changeset ($self->changesets) { $out .= $changeset->as_string(change_filter => sub { shift->record_uuid eq $self->uuid }); } return $out; } =head2 record_reference_methods Returns a list of method names that refer to other individual records =cut sub record_reference_methods { my $self = shift; my $class = blessed($self) || $self; my %accessors = %{ $self->REFERENCES->{$class} || {} }; return grep { $accessors{$_}{arity} eq 'record' } keys %accessors; } =head2 collection_reference_methods Returns a list of method names that refer to collections =cut sub collection_reference_methods { my $self = shift; my $class = blessed($self) || $self; my %accessors = %{ $self->REFERENCES->{$class} || {} }; return grep { $accessors{$_}{arity} eq 'collection' } keys %accessors; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica.pm000066400000000000000000001031171160607302300173130ustar00rootroot00000000000000package Prophet::Replica; use Any::Moose; use Params::Validate qw(:all); use File::Spec (); use File::Path qw/mkpath/; use constant state_db_uuid => 'state'; use Prophet::App; has metadata_store => ( is => 'rw', isa => 'Prophet::MetadataStore', documentation => 'Where metadata about other replicas is stored.', ); has resolution_db_handle => ( is => 'rw', isa => 'Prophet::Replica', documentation => 'Where conflict resolutions are stored.', ); has is_resdb => ( is => 'rw', isa => 'Bool', documentation => 'Whether this replica is a resolution db or not.' ); has db_uuid => ( is => 'rw', isa => 'Str', documentation => 'The uuid of this replica.', ); sub set_db_uuid { shift->db_uuid(@_) } has url => ( is => 'rw', isa => 'Str', documentation => 'Where this replica comes from.', ); has app_handle => ( is => 'ro', isa => 'Prophet::App', weak_ref => 1, predicate => 'has_app_handle', ); has after_initialize => ( is => 'rw', isa => 'CodeRef', default => sub { sub {1} } # default returns a coderef ); has uuid_generator => ( is => 'rw', isa => 'Prophet::UUIDGenerator', lazy => 1, default => sub { my $self = shift; my $ug = Prophet::UUIDGenerator->new( uuid_scheme => 2 ); return $ug; } ); our $MERGETICKET_METATYPE = '_merge_tickets'; =head1 NAME Prophet::Replica =head1 DESCRIPTION A base class for all Prophet replicas. =head1 METHODS =head3 get_handle Determines what replica class to use and instantiates it. Returns the new replica object. 
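For example (a sketch; the path is made up):

    my $replica = Prophet::Replica->get_handle(
        url        => 'file:///home/alice/.prophet/myapp',
        app_handle => $app,
    );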
=cut sub get_handle { my $class = shift; my %args = @_ == 1 ? %{ $_[0] } : @_; my ( $new_class, $scheme, $url ) = $class->_url_to_replica_class(%args); if ( !$new_class ) { $class->log_fatal( "I don't know how to handle the replica URL you provided - '@{[ $args{url}]}'." ."\nIs your syntax correct?" ); } Prophet::App->require($new_class); my $handle = $new_class->new(%args); if ($handle->replica_exists && $handle->db_uuid) { $handle->uuid_generator->set_uuid_scheme($handle->db_uuid); } return $handle; } sub initialize { my $self = shift; my %args = validate( @_, { db_uuid => 0, replica_uuid => 0, resdb_uuid => 0, resdb_replica_uuid => 0, } ); if ( !$self->fs_root_parent ) { if ( $self->can_write_changesets ) { die "We can only create local prophet replicas. It looks like you're trying to create " . $self->url; } else { die "Prophet couldn't find a replica at \"" . $self->url . "\"\n\n" . "Please check the URL and try again.\n"; } } return undef if $self->replica_exists; $self->uuid_generator->set_uuid_scheme($args{'db_uuid'}) if ($args{db_uuid}); for ( $self->_on_initialize_create_paths ) { mkpath( [ File::Spec->catdir( $self->fs_root => $_ ) ] ); } $self->initialize_backend(%args); $self->after_initialize->($self); } =head2 store_local_metadata KEY => VALUE Takes a key and a value. Store some bit of metadata in a durable local datastore. Metadata isn't propagated when replicas are synced. Returns true or false. =cut =head2 fetch_local_metadata KEY Takes a scalar key. Fetches a bit of metadata from the local metadata store. Returns the value of the key found in the local metadata store. Returns undef if there's no value for the key in the local metadata store. =cut sub replica_exists { return 1; # XXX TODO HACK } sub can_initialize { return undef; } =head3 _url_to_replica_class Returns the replica class for the given url based on its scheme. =cut sub _url_to_replica_class { my $self = shift; my %args = (@_); my $url = $args{'url'}; my ( $scheme, $real_url ) = $url =~ /^([^:]*?):(.*)$/; return undef unless $scheme; for my $class ( ref( $args{app_handle} ) . "::Replica::" . $scheme, "Prophet::Replica::".$scheme ) { Prophet::App->try_to_require($class) || next; return ( $class, $scheme, $real_url ); } return undef; } =head3 import_changesets { from => L ... } Given a L to import changes from, traverse all the changesets we haven't seen before and integrate them into this replica. This routine calls L on the 'from' replica, passing in the most recent changeset the current replica has seen and a callback routine which calls L on the local replica. That callback itself takes a callback, L , which a replica implementation can use to perform some action after a changeset is integrated into a peer. L takes a paramhash, currently with only a single key, 'changeset'. =cut sub import_changesets { my $self = shift; my %args = validate( @_, { from => { isa => 'Prophet::Replica' }, resdb => { optional => 1 }, resolver => { optional => 1 }, resolver_class => { optional => 1 }, conflict_callback => { type => CODEREF, optional => 1 }, reporting_callback => { type => CODEREF, optional => 1 }, force => { optional => 1 }, } ); my $source = $args{'from'}; $self->_check_db_uuids_on_merge(for => $source, force => $args{'force'}); warn "The source (@{[$source->url]}) does not exist" unless ($source->replica_exists); $self->log_debug("Integrating changesets from ".$source->uuid. " after ". 
$self->last_changeset_from_source( $self->uuid )); $source->traverse_changesets( after => $self->last_changeset_from_source( $source->uuid ), before_load_changeset_callback => sub { my %args = (@_); my ($seq, $orig_uuid, $orig_seq, $key) = @{$args{changeset_metadata}}; # skip changesets we've seen before if ( $self->has_seen_changeset( source_uuid => $orig_uuid, sequence_no => $orig_seq) ){ # If we've seen the changeset, yet we still got here, it # means we saw it by original # replica/sequence pair, but not the direct upstream's # uuid/sequence pair. # recording that can help performance a whole bunch for # next sync if ($source->uuid && $seq && $seq > $self->last_changeset_from_source($source->uuid)) { $self->record_last_changeset_from_replica( $source->uuid => $seq); } return undef; } else { return 1; } }, callback => sub { my %callback_args = (@_); $self->integrate_changeset( changeset => $callback_args{changeset}, conflict_callback => $args{'conflict_callback'}, reporting_callback => $args{'reporting_callback'}, resolver => $args{'resolver'}, resolver_class => $args{'resolver_class'}, resdb => $args{'resdb'}, ); if ( ref( $callback_args{'after_integrate_changeset'} ) ) { $callback_args{'after_integrate_changeset'}->( changeset => $callback_args{'changeset'} ); } } ); } =head3 import_resolutions_from_remote_replica { from => L ... } Takes a L object (and possibly some optional arguments) and imports its resolution changesets into this replica's resolution database. Returns immediately if either the source replica or the target replica lack a resolution database. =cut sub import_resolutions_from_remote_replica { my $self = shift; my %args = validate( @_, { from => { isa => 'Prophet::Replica' }, resolver => { optional => 1 }, resolver_class => { optional => 1 }, conflict_callback => { optional => 1 }, force => { optional => 1 }, } ); my $source = $args{'from'}; return unless $self->resolution_db_handle; return unless $source->resolution_db_handle; $self->resolution_db_handle->import_changesets( from => $source->resolution_db_handle, resolver => sub { die "not implemented yet" }, force => $args{force}, ); } =head3 integrate_changeset L Given a L, integrate each and every change within that changeset into the handle's replica. If there are conflicts, generate a nullification change, figure out a conflict resolution and apply the nullification, original change and resolution all at once (as three separate changes). If there are no conflicts, just apply the change. This routine also records that we've seen this changeset (and hence everything before it) from both the peer who sent it to us AND the replica which originally created it. =cut sub integrate_changeset { my $self = shift; my %args = validate( @_, { changeset => { isa => 'Prophet::ChangeSet' }, resolver => { optional => 1 }, resolver_class => { optional => 1 }, resdb => { optional => 1 }, conflict_callback => { optional => 1 }, reporting_callback => { optional => 1 } } ); my $changeset = $args{'changeset'}; $self->log_debug("Considering changeset ".$changeset->original_sequence_no . " from " . $self->display_name_for_replica($changeset->original_source_uuid)); # when we start to integrate a changeset, we need to do a bit of housekeeping # We never want to merge in: # - merge tickets that describe merges from the local record # When we integrate changes, sometimes we will get handed changes we # already know about. 
# - changes from local # - changes from some other party we've merged from # - merge tickets for the same # we'll want to skip or remove those changesets if ( !$self->should_accept_changeset($changeset) ) { # if it's a changeset we don't care about, mark it as seen and move on $self->record_integration_of_changeset($changeset); $args{'reporting_callback'}->( changeset => $changeset, ) if ( $args{'reporting_callback'} ); return; } elsif ( my $conflict = $self->conflicts_from_changeset($changeset) ) { $self->log_debug( "Integrating conflicting changeset " . $changeset->original_sequence_no . " from " . $self->display_name_for_replica( $changeset->original_source_uuid ) ); $args{conflict_callback}->($conflict) if $args{'conflict_callback'}; $conflict->resolvers( [ sub { $args{resolver}->(@_) } ] ) if $args{resolver}; if ( $args{resolver_class} ) { Prophet::App->require( $args{resolver_class} ) || die $@; $conflict->resolvers( [ sub { $args{resolver_class}->new->run(@_); } ] ); } my $resolutions = $conflict->generate_resolution( $args{resdb} ); #figure out our conflict resolution # IMPORTANT: these should be an atomic unit. dying here would be poor. # BUT WE WANT THEM AS THREE DIFFERENT CHANGESETS # integrate the nullification change $self->record_changes( $conflict->nullification_changeset ); # integrate the original change $self->record_changeset_and_integration($changeset); # integrate the conflict resolution change $self->record_resolutions( $conflict->resolution_changeset ); $args{'reporting_callback'}->( changeset => $changeset, conflict => $conflict ) if ( $args{'reporting_callback'} ); return 1; } else { $self->log_debug("Integrating changeset ". $changeset->original_sequence_no . " from " . $self->display_name_for_replica($changeset->original_source_uuid)); $self->record_changeset_and_integration($changeset); $args{'reporting_callback'}->( changeset => $changeset ) if ( $args{'reporting_callback'} ); return 1; } } =head3 record_changeset_and_integration L Given a L, integrate each and every change within that changeset into the handle's replica. If the state handle is in the middle of an edit, the integration of this changeset is recorded as part of that edit; if not, it is recorded as a new edit. =cut sub record_changeset_and_integration { my $self = shift; my $changeset = shift; $self->begin_edit(source => $changeset); $self->record_changes($changeset); $self->record_integration_of_changeset($changeset); $self->_set_original_source_metadata_for_current_edit($changeset); $self->commit_edit; return; } =head3 last_changeset_from_source $SOURCE_UUID Returns the last changeset id seen from the replica identified by $SOURCE_UUID. =cut sub last_changeset_from_source { my $self = shift; my ($source) = validate_pos( @_, { type => SCALAR } ); my $changeset_num = $self->fetch_local_metadata('last-changeset-from-'.$source); # 0 is a valid changeset # return defined $changeset_num ? $changeset_num : -1; } =head3 has_seen_changeset { source_uuid => , sequence_no => } Returns true if we've previously integrated this changeset, even if we originally received it from a different peer. =cut sub has_seen_changeset { my $self = shift; my %args = validate( @_, {source_uuid => 1, sequence_no => 1}); $self->log_debug("Checking to see if we've ever seen changeset " . $args{sequence_no} . " from " . $self->display_name_for_replica($args{source_uuid})); $self->log_debug("Last changeset from source: " . 
$self->last_changeset_from_source($args{source_uuid})); # If the changeset originated locally, we never want it if (lc($args{source_uuid}) eq lc($self->uuid) ) { $self->log_debug("\t - We have. (It originated locally.)"); return 1; } # Otherwise, if the we have a merge ticket from the source, we don't want # the changeset if the source's sequence # is >= the changeset's sequence # #, we can safely skip it elsif ( $self->last_changeset_from_source( $args{source_uuid} ) >= $args{sequence_no} ) { $self->log_debug("\t - We have seen this or a more recent changeset from remote."); return 1; } else { return undef; } } =head3 changeset_will_conflict L Returns true if any change that's part of this changeset won't apply cleanly to the head of the current replica. =cut sub changeset_will_conflict { my $self = shift; my ($changeset) = validate_pos( @_, { isa => "Prophet::ChangeSet" } ); return 1 if ( $self->conflicts_from_changeset($changeset) ); return undef; } =head3 conflicts_from_changeset L Returns a L object if the supplied L will generate conflicts if applied to the current replica. Returns undef if the current changeset wouldn't generate a conflict. =cut sub conflicts_from_changeset { my $self = shift; my ($changeset) = validate_pos( @_, { isa => "Prophet::ChangeSet" } ); require Prophet::Conflict; my $conflict = Prophet::Conflict->new( { changeset => $changeset, prophet_handle => $self} ); $conflict->analyze_changeset(); return undef unless $conflict->has_conflicting_changes; $self->log_debug("Conflicting changeset: ". JSON::to_json($conflict, {allow_blessed => 1})); return $conflict; } sub _check_db_uuids_on_merge { my $self = shift; my %args = validate( @_, { for => { isa => 'Prophet::Replica' }, force => 0, }); if ( $self->db_uuid && $args{for}->db_uuid && $self->db_uuid ne $args{for}->db_uuid ) { unless ( $args{'force'} ) { die "You are trying to merge two different databases! This is NOT\n" . "recommended. If you really want to do this, add '--force' to\n" . "your commandline.\n\n" . "Local database: " . $self->db_uuid . "\n" . "Remote database: " . $args{for}->db_uuid . "\n"; } } } =head3 should_accept_changeset { from => L, changeset => L } Returns true if this replica should integrate C, false otherwise. =cut sub should_accept_changeset { my $self = shift; my ($changeset) = validate_pos( @_, { changeset => { isa => 'Prophet::ChangeSet' } }); $self->log_debug("Should I accept " .$changeset->original_sequence_no . " from ". $self->display_name_for_replica($changeset->original_source_uuid)); if ( !$changeset->has_changes || $changeset->is_nullification || $changeset->is_resolution || $self->has_seen_changeset( sequence_no => $changeset->original_sequence_no, source_uuid => $changeset->original_source_uuid ) ) { return 0; } else { $self->log_debug("Yes, it has changes, isn't a nullification ". "and I haven't seen it before"); return 1; } } =head3 fetch_changesets { after => SEQUENCE_NO } Fetch all changesets from this replica after the local sequence number SEQUENCE_NO. Returns a reference to an array of L objects. See also L for replica implementations to provide streamly interface. =cut sub fetch_changesets { my $self = shift; my %args = validate( @_, { after => 1 } ); my @results; $self->traverse_changesets( %args, callback => sub { my %args = @_; push @results, $args{changeset} } ); return \@results; } =head2 methods to be implemented by a replica backend =head3 uuid Returns this replica's uuid. 
=cut sub uuid {} =head3 latest_sequence_no Returns the sequence # of the most recently committed changeset. =cut sub latest_sequence_no { return undef } =head3 find_or_create_luid { uuid => UUID } Finds or creates a LUID for the given UUID. =cut sub find_or_create_luid { my $self = shift; my %args = validate( @_, { uuid => 1 } ); my $mapping = $self->_read_guid2luid_mappings; if (!exists($mapping->{ $args{'uuid'} })) { $mapping->{ $args{'uuid'} } = $self->_create_luid($mapping); $self->_write_guid2luid_mappings($mapping); } return $mapping->{ $args{'uuid'} }; } sub find_luid_by_uuid { my $self = shift; my %args = validate( @_, { uuid => 1 } ); my $mapping = $self->_read_guid2luid_mappings; if (!exists($mapping->{ $args{'uuid'} })) { return undef; } return $mapping->{ $args{'uuid'} }; } =head3 find_uuid_by_luid { luid => LUID } Finds the UUID for the given LUID. Returns C if the LUID is not known. =cut sub find_uuid_by_luid { my $self = shift; my %args = validate( @_, { luid => 1 } ); my $mapping = $self->_read_luid2guid_mappings; return $mapping->{ $args{'luid'} }; } =head3 _create_luid ( 'uuid' => 'luid' ) Given a UUID => LUID hash mapping, return a new unused LUID (one higher than the mapping's current highest luid). =cut sub _create_luid { my $self = shift; my $map = shift; return ++$map->{'_meta'}{'maximum_luid'}; } =head3 _do_userdata_read $PATH $DEFAULT Returns a reference to the parsed JSON contents of the file given by C<$PATH> in the replica's userdata directory. Returns C<$DEFAULT> if the file does not exist. =cut sub _do_userdata_read { my $self = shift; my $path = shift; my $default = shift; my $json = $self->read_userdata( path => $path ) || $default; require JSON; return JSON::from_json($json, { utf8 => 1 }); } =head3 _do_userdata_write $PATH $VALUE serializes C<$VALUE> to JSON and writes it to the file given by C<$PATH> in the replica's userdata directory, creating parent directories as necessary. =cut sub _do_userdata_write { my $self = shift; my $path = shift; my $value = shift; require JSON; my $content = JSON::to_json($value, { canonical => 1, pretty => 0, utf8 => 1 }); $self->write_userdata( path => $path, content => $content, ); } =head3 _upstream_replica_cache_file A string representing the name of the file where replica URLs that have been previously pulled from are cached. =cut sub _upstream_replica_cache_file { "upstream-replica-cache" } =head3 _read_cached_upstream_replicas Returns a list of cached upstream replica URLs, or an empty list if there are no cached URLs. =cut sub _read_cached_upstream_replicas { my $self = shift; return @{ $self->_do_userdata_read( $self->_upstream_replica_cache_file, '[]' ) || [] }; } =head3 _write_cached_upstream_replicas @REPLICAS writes the replica URLs given by C<@REPLICAS> to the upstream replica cache file. =cut sub _write_cached_upstream_replicas { my $self = shift; my @replicas = @_; return $self->_do_userdata_write( $self->_upstream_replica_cache_file, [@replicas] ); } =head3 _guid2luid_file The file in the replica's userdata directory which contains a serialized JSON UUID => LUID hash mapping. =cut sub _guid2luid_file { "local-id-cache" } =head3 _read_guid2luid_mappings Returns a UUID => LUID hashref for this replica. =cut sub _read_guid2luid_mappings { my $self = shift; return $self->_do_userdata_read( $self->_guid2luid_file, '{}' ); } =head3 _write_guid2luid_mappings ( 'uuid' => 'luid' ) Writes the given UUID => LUID hash map to C as serialized JSON. 
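The map has the same shape that C<_read_guid2luid_mappings> returns and that C<_create_luid> updates; for example (UUIDs shortened for readability):

    {
        '4FBF4CB4-...' => 1,
        '9A0E6C3A-...' => 2,
        '_meta'        => { 'maximum_luid' => 2 },
    }
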
=cut sub _write_guid2luid_mappings { my $self = shift; my $map = shift; return $self->_do_userdata_write( $self->_guid2luid_file, $map ); } =head3 _read_luid2guid_mappings Returns a LUID => UUID hashref for this replica. =cut sub _read_luid2guid_mappings { my $self = shift; my $guid2luid = $self->_read_guid2luid_mappings(@_); delete $guid2luid->{'_meta'}; my %luid2guid = reverse %$guid2luid; return \%luid2guid; } =head3 traverse_changesets { after => SEQUENCE_NO, until => SEQUENCE_NO, callback => sub { my %data = (changeset => undef, @_} } Walk through each changeset in the replica after SEQUENCE_NO, calling the C for each one in turn. =cut sub traverse_changesets { my $class = blessed($_[0]); Carp::confess "$class has failed to implement a 'traverse_changesets' method for their replica type."; } =head3 can_read_changesets Returns true if this source is one we know how to read from (and have permission to do so). =cut sub can_read_changesets { undef } =head3 can_write_changesets Returns true if this source is one we know how to write to (and have permission to write to). Returns false otherwise. =cut sub can_write_changesets { undef } =head3 record_resolutions L Given a resolution changeset, record all the resolution changesets as well as resolution records in the local resolution database. Called ONLY on local resolution creation. (Synced resolutions are just synced as records.) =cut sub record_resolutions { my $self = shift; my ($changeset) = validate_pos(@_, { isa => 'Prophet::ChangeSet'}); $self->_unimplemented("record_resolutions (since there is no writable handle)") unless ($self->can_write_changesets); # If we have a resolution db handle, record the resolutions there. # Otherwise, record them locally my $res_handle = $self->resolution_db_handle || $self; return unless $changeset->has_changes; $self->begin_edit(source => $changeset); $self->record_changes($changeset); $res_handle->_record_resolution($_) for $changeset->changes; $self->commit_edit(); } =head3 _record_resolution L Called ONLY on local resolution creation. (Synced resolutions are just synced as records.) =cut sub _record_resolution { my $self = shift; my ($change) = validate_pos(@_, { isa => 'Prophet::Change'}); return 1 if $self->record_exists( uuid => $self->uuid, type => '_prophet_resolution-' . $change->resolution_cas ); $self->create_record( uuid => $self->uuid, type => '_prophet_resolution-' . $change->resolution_cas, props => { _meta => $change->change_type, map { $_->name => $_->new_value } $change->prop_changes } ); } =head2 routines dealing with integrating changesets into a replica =head3 record_changes L Inside an edit (transaction), integrate all changes in this changeset and then call the _after_record_changes() hook. =cut sub record_changes { my $self = shift; my ($changeset) = validate_pos(@_, { isa => 'Prophet::ChangeSet' }); $self->_unimplemented ('record_changes') unless ($self->can_write_changesets); eval { local $SIG{__DIE__} = 'DEFAULT'; my $inside_edit = $self->current_edit ? 1 : 0; $self->begin_edit(source => $changeset) unless ($inside_edit); $self->integrate_changes($changeset); $self->_after_record_changes($changeset); $self->commit_edit() unless ($inside_edit); }; die($@) if ($@); } =head3 integrate_changes L This routine is called by L with a L object. It integrates all changes from that object into the current replica. All bookkeeping, such as opening and closing an edit, is done by L. 
If your replica type needs to play games to integrate multiple changes as a single record, this is what you'd override. =cut sub integrate_changes { my ($self, $changeset) = validate_pos( @_, {isa => 'Prophet::Replica'}, { isa => 'Prophet::ChangeSet' } ); $self->integrate_change($_, $changeset) for ( $changeset->changes ); } =head2 integrate_change L Integrates the given change into the current replica using the currently active replica backend. Used in L. Changes can have the following types: - add_file - add_dir - update_file - delete Trying to integrate a change of an unknown type will result in a fatal error. =cut sub integrate_change { my ($self, $change) = validate_pos(@_, { isa => 'Prophet::Replica' }, { isa => 'Prophet::Change' }, { isa => 'Prophet::ChangeSet' }, ); my %new_props = map { $_->name => $_->new_value } $change->prop_changes; if ( $change->change_type eq 'add_file' ) { $self->log_debug("add_file: " . $change->record_type . " " . $change->record_uuid); $self->create_record( type => $change->record_type, uuid => $change->record_uuid, props => \%new_props ); } elsif ( $change->change_type eq 'add_dir' ) { $self->log_debug("(IGNORED) add_dir: " . $change->record_type. " " . $change->record_uuid); } elsif ( $change->change_type eq 'update_file' ) { $self->log_debug("update_file: " .$change->record_type. " " .$change->record_uuid); $self->set_record_props( type => $change->record_type, uuid => $change->record_uuid, props => \%new_props ); } elsif ( $change->change_type eq 'delete' ) { $self->log_debug("delete_file: " . $change->record_type. " " .$change->record_uuid); $self->delete_record( type => $change->record_type, uuid => $change->record_uuid ); } else { Carp::confess( "Unknown change type: " . $change->change_type ); } } =head3 record_integration_of_changeset L This routine records the immediately upstream and original source uuid and sequence numbers for this changeset. Prophet uses this data to make sane choices about later replay and merge operations =cut sub record_integration_of_changeset { my $self = shift; my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); if ( $changeset->original_source_uuid ne $self->uuid && ( $self->last_changeset_from_source( $changeset->original_source_uuid ) < $changeset->original_sequence_no ) ) { $self->record_last_changeset_from_replica( $changeset->original_source_uuid, $changeset->original_sequence_no ); } if ( $changeset->source_uuid ) { if ( $self->last_changeset_from_source( $changeset->source_uuid ) < $changeset->sequence_no ) { $self->record_last_changeset_from_replica( $changeset->source_uuid => $changeset->sequence_no ); } } } sub record_last_changeset_from_replica { my $self = shift; my ($uuid, $sequence) = validate_pos(@_, 1,1); return $self->store_local_metadata( 'last-changeset-from-' . $uuid, $sequence ); } =head2 routines which need to be implemented by any Prophet backend store =head3 uuid Returns this replica's UUID. =head3 create_record { type => $TYPE, uuid => $UUID, props => { key-value pairs } } Create a new record of type C<$TYPE> with uuid C<$UUID> within the current replica. Sets the record's properties to the key-value hash passed in as the C argument. If called from within an edit, it uses the current edit. Otherwise it manufactures and finalizes one of its own. =head3 delete_record {uuid => $UUID, type => $TYPE } Deletes the record C<$UUID> of type C<$TYPE> from the current replica. Manufactures its own new edit if C<$self->current_edit> is undefined. 
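For example, creating and then deleting a record (a sketch; the record type, uuid, and properties are illustrative):

    $replica->create_record(
        type  => 'ticket',
        uuid  => $uuid,
        props => { summary => 'Example ticket', status => 'open' },
    );
    $replica->delete_record( type => 'ticket', uuid => $uuid );
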
=head3 set_record_props { uuid => $UUID, type => $TYPE, props => {hash of kv pairs }} Updates the record of type C<$TYPE> with uuid C<$UUID> to set each property defined by the props hash. It does NOT alter any property not defined by the props hash. Manufactures its own current edit if none exists. =head3 get_record_props { uuid => $UUID, type => $TYPE, root => $ROOT } Returns a hashref of all properties for the record of type C<$TYPE> with uuid C<$UUID>. 'root' is an optional argument which you can use to pass in an alternate historical version of the replica to inspect. Code to look at the immediately previous version of a record might look like: $handle->get_record_props( type => $record->type, uuid => $record->uuid, root => $self->repo_handle->fs->revision_root( $self->repo_handle->fs->youngest_rev - 1 ) ); =head3 record_exists {uuid => $UUID, type => $TYPE } Returns true if the record in question exists and false otherwise. =head3 list_records { type => $TYPE } Returns a reference to a list of all the records of type $TYPE. =head3 list_types Returns a reference to a list of all the known types in your Prophet database. =head3 type_exists { type => $type } Returns true if we have any records of type C<$TYPE>. =head2 routines which need to be implemented by any _writable_ prophet backend store =head2 optional routines which are provided for you to override with backend-store specific behaviour =head3 _after_record_changes L Called after the replica has integrated a new changeset but before closing the current transaction/edit. The SVN backend, for example, used this to record author metadata about this changeset. =cut sub _after_record_changes { return 1; } =head3 _set_original_source_metadata_for_current_edit Sets C and C for the current edit. =cut sub _set_original_source_metadata_for_current_edit {} =head2 helper routines =cut =head3 log $MSG Logs the given message to C (but only if the C environmental variable is set). =cut sub log { my $self = shift; my ($msg) = validate_pos(@_, 1); Carp::confess unless ($self->app_handle); $self->app_handle->log($msg); } sub log_debug { my $self = shift; my $msg = shift; $self->app_handle->log_debug($self->display_name_for_replica.": " .$msg); } =head2 log_fatal $MSG Logs the given message and dies with a stack trace. =cut sub log_fatal { my $self = shift; # always skip this fatal_error function when generating a stack trace local $Carp::CarpLevel = $Carp::CarpLevel + 1; if ( eval {$self->app_handle }) { $self->app_handle->log_fatal(@_); } else { die join('',@_) ."\n"; } } =head2 changeset_creator The string to use as the creator of a changeset. =cut sub changeset_creator { my $self = shift; return $self->app_handle->current_user_email; } =head2 display_name_for_replica [uuid] If the user has a "friendly" name for this replica, then use it. Otherwise, display the replica's uuid. If you pass in a uuid, it will be used instead of the replica's uuid. 
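For example, when reporting which peer a changeset came from (C<$changeset> is a placeholder for a L<Prophet::ChangeSet> object):

    my $name = $replica->display_name_for_replica(
        $changeset->original_source_uuid
    );
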
=cut sub display_name_for_replica { my $self = shift; my $uuid = shift || $self->uuid; return $uuid if !$self->app_handle; return $self->app_handle->display_name_for_replica($uuid); } __PACKAGE__->meta->make_immutable(); no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/000077500000000000000000000000001160607302300167525ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Replica/FS/000077500000000000000000000000001160607302300172625ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Replica/FS/Backend/000077500000000000000000000000001160607302300206115ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Replica/FS/Backend/File.pm000066400000000000000000000054401160607302300220310ustar00rootroot00000000000000package Prophet::Replica::FS::Backend::File; use Any::Moose; use Fcntl qw/SEEK_END/; use Params::Validate qw/validate validate_pos/; has url => ( is => 'rw', isa => 'Str'); has fs_root => ( is => 'rw', isa => 'Str'); sub read_file { my $self = shift; my ($file) = (@_); # validation is too heavy to be called here #my ($file) = validate_pos( @_, 1 ); return eval { local $SIG{__DIE__} = 'DEFAULT'; Prophet::Util->slurp( Prophet::Util->catfile( $self->fs_root => $file ) ); }; } sub read_file_range { my $self = shift; my %args = validate( @_, { path => 1, position => 1, length => 1 } ); if ($self->fs_root) { my $f = Prophet::Util->catfile( $self->fs_root => $args{path} ); return unless -e $f; if ( $^O =~ /MSWin/ ) { # XXX by sunnavy # the the open, seek and read below doesn't work on windows, at least with # strawberry perl 5.10.0.6 on windows xp # # the differences: # with substr, I got: # 0000000: 0000 0004 ecaa d794 a5fe 8c6f 6e85 0d0a ...........on... # 0000010: 7087 f0cf 1e92 b50d f9 p........ # # the read, I got # 0000000: 0000 04ec aad7 94a5 fe8c 6f6e 850d 0d0a ..........on.... # 0000010: 7087 f0cf 1e92 b50d f9 p........ # # seems with read, we got an extra 0d, I dont' know why yet :/ my $content = Prophet::Util->slurp( $f ); return substr($content, $args{position}, $args{length}); } else { open( my $index, "<:bytes", $f ) or return; seek( $index, $args{position}, SEEK_END ) or return; my $record; read( $index, $record, $args{length} ) or return; return $record; } } else { # XXX: do range get if possible my $content = $self->lwp_get( $self->url . "/" . $args{path} ); return substr($content, $args{position}, $args{length}); } } sub write_file { my $self = shift; my %args = (@_); # validation is too heavy to call here #my %args = validate( @_, { path => 1, content => 1 } ); my $file = Prophet::Util->catfile( $self->fs_root => $args{'path'} ); Prophet::Util->write_file( file => $file, content => $args{content}); } sub append_to_file { my $self = shift; my ($filename, $content) = validate_pos(@_, 1,1 ); open( my $file, ">>" . 
Prophet::Util->catfile( $self->fs_root => $filename) ) || die $!; print $file $content || die $!; close $file; } sub file_exists { my $self = shift; my ($file) = validate_pos( @_, 1 ); my $path = Prophet::Util->catfile( $self->fs_root, $file ); if ( -f $path ) { return 1 } elsif ( -d $path ) { return 2 } else { return 0 } } sub can_read { 1; } sub can_write { 1; } no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/FS/Backend/LWP.pm000066400000000000000000000026541160607302300216200ustar00rootroot00000000000000package Prophet::Replica::FS::Backend::LWP; use Any::Moose; use Params::Validate qw/validate validate_pos/; use LWP::UserAgent; has url => ( is => 'rw', isa => 'Str'); has lwp_useragent => ( isa => 'LWP::UserAgent', is => 'ro', lazy => 1, default => sub { my $ua = LWP::UserAgent->new( timeout => 60, keep_alive => 4, agent => "Prophet/".$Prophet::VERSION); return $ua; } ); sub read_file { my $self = shift; my ($file) = validate_pos( @_, 1 ); return $self->lwp_get( $self->url . "/" . $file ); } sub read_file_range { my $self = shift; my %args = validate( @_, { path => 1, position => 1, length => 1 } ); # XXX: do range get if possible my $content = $self->lwp_get( $self->url . "/" . $args{path} ); return substr($content, $args{position}, $args{length}); } sub lwp_get { my $self = shift; my $url = shift; my $response; for ( 1 .. 4 ) { $response = $self->lwp_useragent->get($url); if ( $response->is_success ) { return $response->content; } } warn "Could not fetch " . $url . " - " . $response->status_line . "\n"; return undef; } sub write_file { } sub append_to_file { } sub file_exists { my $self = shift; my ($file) = validate_pos( @_, 1 ); return defined $self->read_file($file) ? 1 : 0; } sub can_read { 1; } sub can_write { 0; } no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/FS/Backend/SSH.pm000066400000000000000000000001101160607302300215740ustar00rootroot00000000000000package Prophet::FSR::Backend::SSH; use Any::Moose; no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/file.pm000066400000000000000000000022641160607302300202330ustar00rootroot00000000000000package Prophet::Replica::file; use Any::Moose; extends 'Prophet::Replica::prophet'; sub scheme { 'file' } sub replica_exists { my $self = shift; return 0 unless defined $self->fs_root && -d $self->fs_root; return 0 unless -e Prophet::Util->catfile( $self->fs_root => 'database-uuid' ); return 1; } sub new { my $class = shift; my %args = @_; my @probe_types = ($args{app_handle}->default_replica_type, 'file', 'sqlite'); my %possible; for my $type (@probe_types) { my $ret; eval { my $other = "Prophet::Replica::$type"; Prophet::App->try_to_require($other); $ret = $type eq "file" ? $other->SUPER::new(@_) : $other->new(@_); }; next if $@ or not $ret; return $ret if $ret->replica_exists; $possible{$type} = $ret; } if (my $default_type = $possible{$args{app_handle}->default_replica_type} ) { return $default_type; } else { $class->log_fatal("I don't know what to do with the Prophet replica ". "type you specified: ".$args{app_handle}->default_replica_type. 
"\nIs your URL syntax correct?"); } } no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/http.pm000066400000000000000000000001371160607302300202700ustar00rootroot00000000000000package Prophet::Replica::http; use base 'Prophet::Replica::prophet'; sub scheme { 'http'} 1; prophet-0.750/lib/Prophet/Replica/prophet.pm000077500000000000000000000544351160607302300210070ustar00rootroot00000000000000package Prophet::Replica::prophet; use Any::Moose; extends 'Prophet::FilesystemReplica'; use Params::Validate qw(:all); use LWP::UserAgent; use LWP::ConnCache; use File::Spec (); use File::Path; use Cwd (); use File::Find; use Prophet::Util; use POSIX qw(); use Memoize; use Prophet::ContentAddressedStore; use JSON; use Digest::SHA qw(sha1_hex); has '+db_uuid' => ( lazy => 1, default => sub { shift->_read_file('database-uuid') }, ); has _uuid => ( is => 'rw', ); has _replica_version => ( is => 'rw', isa => 'Int', lazy => 1, default => sub { shift->_read_file('replica-version') || 0 } ); has fs_root_parent => ( is => 'rw', lazy => 1, default => sub { my $self = shift; if ( $self->url =~ m{^file://(.*)} ) { my $path = $1; return File::Spec->catdir( ( File::Spec->splitpath($path) )[ 0, -2 ] ); } }, ); has fs_root => ( is => 'rw', lazy => 1, default => sub { my $self = shift; return $self->url =~ m{^file://(.*)$} ? $1 : undef; }, ); has record_cas => ( is => 'rw', isa => 'Prophet::ContentAddressedStore', lazy => 1, default => sub { my $self = shift; Prophet::ContentAddressedStore->new( { fs_root => $self->fs_root, root => $self->record_cas_dir } ); }, ); has changeset_cas => ( is => 'rw', isa => 'Prophet::ContentAddressedStore', lazy => 1, default => sub { my $self = shift; Prophet::ContentAddressedStore->new( { fs_root => $self->fs_root, root => $self->changeset_cas_dir } ); }, ); has current_edit => ( is => 'rw', isa => 'Maybe[Prophet::ChangeSet]', ); has current_edit_records => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, ); has '+resolution_db_handle' => ( isa => 'Prophet::Replica | Undef', lazy => 1, default => sub { my $self = shift; return if $self->is_resdb ; return Prophet::Replica->get_handle( { url => "prophet:" . $self->url . '/resolutions', app_handle => $self->app_handle, is_resdb => 1, } ); }, ); has backend => ( lazy => 1, is => 'rw', default => sub { my $self = shift; my $be; if ($self->url =~ /^http/i) { $be = 'Prophet::Replica::FS::Backend::LWP'; } else { $be = 'Prophet::Replica::FS::Backend::File'; }; Prophet::App->require($be); return $be->new(url => $self->url, fs_root => $self->fs_root); } ); use constant scheme => 'prophet'; use constant cas_root => 'cas'; use constant record_cas_dir => File::Spec->catdir( __PACKAGE__->cas_root => 'records' ); use constant changeset_cas_dir => File::Spec->catdir( __PACKAGE__->cas_root => 'changesets' ); use constant record_dir => 'records'; use constant userdata_dir => 'userdata'; use constant changeset_index => 'changesets.idx'; use constant local_metadata_dir => 'local_metadata'; =head1 Replica Format =head4 overview $URL // /replica-uuid /latest-sequence-no /replica-version /cas/records//substr(sha1,1,1)/ /cas/changesets//substr(sha1,1,1)/ /records (optional?) / (for resolution is actually _prophet-resolution-) / which is a file containing a list of 0 or more rows last-changed-sequence-no : cas key /changesets.idx index which has records: each record is : local-replica-seq-no : original-uuid : original-seq-no : cas key ... /resolutions/ /replica-uuid /latest-sequence-no /cas//substr(sha1,1,1)/ /content (optional?) 
/_prophet-resolution- (cas-key == a hash the conflicting change) / (record uuid == the originating replica) last-changed-sequence-no : /changesets.idx index which has records: each record is : local-replica-seq-no : original-uuid : original-seq-no : cas key ... Inside the top level directory for the mirror, you'll find a directory named as B. This directory is the root of the published replica. The uuid uniquely identifes the database being replicated. All replicas of this database will share the same UUID. Inside the B<> directory, are a set of files and directories that make up the actual content of the database replica: =over 2 =item C Contains the replica's hex-encoded UUID. =item C Contains a single integer that defines the replica format. The current replica version is 1. =item C Contains a single integer, the replica's most recent sequence number. =item C =item C The C directory holds changesets and records, each keyed by a hex-encoded hash of the item's content. Inside the C directory, you'll find a two-level deep directory tree of single-character hex digits. You'll find the changeset with the sha1 digest C inside C. You'll find the record with the sha1 digest C inside C. TODO: define the format for changesets and records =item C Files inside the C directory are index files which list off all published versions of a record and the key necessary to retrieve the record from the I. Inside the C directory, you'll find directories named for each C in your database. Inside each C directory, you'll find a two-level directory tree of single hexadecimal digits. You'll find the record with the type and the UUID C<29A3CA16-03C5-11DD-9AE0-E25CFCEE7EC4> stored in records/Foo/2/9/29A3CA16-03C5-11DD-9AE0-E25CFCEE7EC4 The format of record files is: <40 chars of hex: cas key> The file is sorted in asecnding order by revision id. =item C The C file lists each changeset in this replica and provides an index into the B to fetch the content of the changeset. The format of record files is: <16 bytes: changeset original source uuid><16 bytes: cas key - sha1 sum of the changeset's content> The file is sorted in ascending order by revision id. =item C =over 2 =item TODO DOC RESOLUTIONS =back =back =cut =head2 BUILD Open a connection to the prophet replica source identified by C<$self->url>. =cut sub BUILD { my $self = shift; my $args = shift; Carp::cluck() unless ( $args->{app_handle} ); for ( $self->{url} ) { s/^prophet://; # url-based constructor in ::replica should do better s{/$}{}; } } =head2 replica_version Returns this replica's version. =cut sub replica_version { die "replica_version is read-only; you want set_replica_version." if @_ > 1; shift->_replica_version } =head2 set_replica_version Sets the replica's version to the given integer. =cut sub set_replica_version { my $self = shift; my $version = shift; $self->_replica_version($version); $self->_write_file( path => 'replica-version', content => $version, ); return $version; } sub can_initialize { my $self = shift; if ( $self->fs_root_parent && -w $self->fs_root_parent ) { return 1; } return 0; } use constant can_read_records => 1; use constant can_read_changesets => 1; sub can_write_changesets { return ( shift->fs_root ? 1 : 0 ) } sub can_write_records { return ( shift->fs_root ? 
1 : 0 ) } sub _on_initialize_create_paths { my $self = shift; return ( $self->record_dir, $self->cas_root, $self->record_cas_dir, $self->changeset_cas_dir, $self->userdata_dir ); } sub initialize_backend { my $self = shift; my %args = validate( @_, { db_uuid => 0, resdb_uuid => 0, } ); $self->set_db_uuid( $args{'db_uuid'} || $self->uuid_generator->create_str ); $self->set_latest_sequence_no("0"); $self->set_replica_uuid( $self->uuid_generator->create_str ); $self->set_replica_version(1); $self->resolution_db_handle->initialize( db_uuid => $args{resdb_uuid} ) if !$self->is_resdb; } sub latest_sequence_no { my $self = shift; $self->_read_file('latest-sequence-no'); } sub set_latest_sequence_no { my $self = shift; my $id = shift; $self->_write_file( path => 'latest-sequence-no', content => scalar($id) ); } sub _increment_sequence_no { my $self = shift; my $seq = $self->latest_sequence_no + 1; $self->set_latest_sequence_no($seq); return $seq; } =head2 uuid Return the replica's UUID =cut sub uuid { my $self = shift; $self->_uuid( $self->_read_file('replica-uuid') ) unless $self->_uuid; # die $@ if $@; return $self->_uuid; } sub set_replica_uuid { my $self = shift; my $uuid = shift; $self->_write_file( path => 'replica-uuid', content => $uuid ); } sub set_db_uuid { my $self = shift; my $uuid = shift; $self->_write_file( path => 'database-uuid', content => $uuid ); $self->SUPER::set_db_uuid($uuid); } =head1 Internals of record handling =cut # Working with records { sub _write_record { my $self = shift; my %args = validate( @_, { record => { isa => 'Prophet::Record' }, } ); my $record = $args{'record'}; $self->_write_serialized_record( type => $record->type, uuid => $record->uuid, props => $record->get_props, ); } sub _write_serialized_record { my $self = shift; my %args = validate( @_, { type => 1, uuid => 1, props => 1 } ); for ( keys %{ $args{'props'} } ) { delete $args{'props'}->{$_} if ( !defined $args{'props'}->{$_} || $args{'props'}->{$_} eq '' ); } my $cas_key = $self->record_cas->write( $args{props} ); my $record = { uuid => $args{uuid}, type => $args{type}, cas_key => $cas_key }; $self->_prepare_record_index_update( uuid => $args{uuid}, type => $args{type}, cas_key => $cas_key ); } sub _prepare_record_index_update { my $self = shift; my %record = (@_); # If we're inside an edit, we can record the changeset info into the index if ( $self->current_edit ) { push @{ $self->current_edit_records }, \%record; } else { # If we're not inside an edit, we're likely exporting the replica # TODO: the replica exporter code should probably be retooled $self->_write_record_index_entry(%record); } } use constant RECORD_INDEX_SIZE => ( 4 + 20 ); sub _write_record_index_entry { my $self = shift; my %args = validate( @_, { type => 1, uuid => 1, cas_key => 1, changeset_id => 0 } ); my $idx_filename = $self->_record_index_filename( uuid => $args{uuid}, type => $args{type} ); my $index_path = Prophet::Util->catfile( $self->fs_root, $idx_filename ); my ( undef, $parent, $filename ) = File::Spec->splitpath($index_path); mkpath( [$parent] ); open( my $record_index, ">>" . 
$index_path ); # XXX TODO: skip if the index already has this version of the record; # XXX TODO FETCH THAT my $record_last_changed_changeset = $args{'changeset_id'} || 0; my $index_row = pack( 'NH40', $record_last_changed_changeset, $args{cas_key} ); print $record_index $index_row || die $!; close $record_index; } sub _read_file_range { my $self = shift; my %args = validate( @_, { path => 1, position => 1, length => 1 } ); return $self->backend->read_file_range(%args); } sub _last_record_index_entry { my $self = shift; my %args = ( type => undef, uuid => undef, @_); my $idx_filename; my $record = $self->_read_file_range( path => $self->_record_index_filename( uuid => $args{uuid}, type => $args{type}), position => (0 - RECORD_INDEX_SIZE), length => RECORD_INDEX_SIZE ) || return undef; my ( $seq, $key ) = unpack( "NH40", $record ) ; return ( $seq, $key ); } sub _read_record_index { my $self = shift; my %args = validate( @_, { type => 1, uuid => 1 } ); my $idx_filename = $self->_record_index_filename( uuid => $args{uuid}, type => $args{type} ); my $index = $self->backend->read_file($idx_filename); return undef unless $index; my $count = length($index) / RECORD_INDEX_SIZE; my @entries; for my $record ( 1 .. $count ) { my ( $seq, $key ) = unpack( 'NH40', substr( $index, ($record - 1) * RECORD_INDEX_SIZE, RECORD_INDEX_SIZE ) ); push @entries, [ $seq => $key ]; } return @entries; } sub _delete_record_index { my $self = shift; my %args = validate( @_, { type => 1, uuid => 1 } ); my $idx_filename = $self->_record_index_filename( uuid => $args{uuid}, type => $args{type} ); unlink Prophet::Util->catfile( $self->fs_root => $idx_filename ) || die "Could not delete record $idx_filename: " . $!; } sub _read_serialized_record { my $self = shift; my %args = validate( @_, { type => 1, uuid => 1 } ); my $casfile = $self->_record_cas_filename( type => $args{'type'}, uuid => $args{'uuid'} ); return undef unless $casfile; return from_json( $self->_read_file($casfile), { utf8 => 1 } ); } # XXX TODO: memoize doesn't work on win: # t\resty-server will issue the following error: # Anonymous function called in forbidden list context; faulting memoize '_record_index_filename' unless $^O =~ /MSWin/; sub _record_index_filename { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1 } ); return Prophet::Util->catfile( $self->_record_type_dir( $args{'type'} ), Prophet::Util::hashed_dir_name( $args{uuid} )); } sub _record_cas_filename { my $self = shift; my %args = ( type => undef, uuid => undef, @_) ; my ( $seq, $key ) = $self->_last_record_index_entry( type => $args{'type'}, uuid => $args{'uuid'} ); return undef unless ( $key and ( $key ne '0' x 40 ) ); return $self->record_cas->filename($key) } sub _record_type_dir { my $self = shift; my $type = shift; return File::Spec->catdir( $self->record_dir, $type ); } # } =head2 changesets_for_record { uuid => $uuid, type => $type, limit => $int } Returns an ordered set of changeset objects for all changesets containing changes to this object. Note that changesets may include changes to other records If "limit" is specified, only returns that many changesets (starting from record creation). 
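For example (assuming C<$record> is a loaded record object):

    my @changesets = $replica->changesets_for_record(
        uuid  => $record->uuid,
        type  => $record->type,
        limit => 20,
    );
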
=cut sub changesets_for_record { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1, limit => 0 } ); my @record_index = $self->_read_record_index( type => $args{'type'}, uuid => $args{'uuid'} ); my $changeset_index = $self->read_changeset_index(); my @changesets; for my $item (@record_index) { my $sequence = $item->[0]; push @changesets, $self->_get_changeset_via_index( sequence_no => $sequence, index_file => $changeset_index ); last if (defined $args{limit} && --$args{limit}); } return @changesets; } =head2 begin_edit Creates a new L, which new changes will be added to. =cut sub begin_edit { my $self = shift; my %args = validate( @_, { source => 0, # the changeset that we're replaying, if applicable } ); my $source = $args{source}; my $creator = $source ? $source->creator : $self->changeset_creator; my $created = $source && $source->created; require Prophet::ChangeSet; my $changeset = Prophet::ChangeSet->new( { source_uuid => $self->uuid, creator => $creator, $created ? ( created => $created ) : (), } ); $self->current_edit($changeset); $self->current_edit_records( [] ); } sub _set_original_source_metadata_for_current_edit { my $self = shift; my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); $self->current_edit->original_source_uuid( $changeset->original_source_uuid ); $self->current_edit->original_sequence_no( $changeset->original_sequence_no ); } sub commit_edit { my $self = shift; my $sequence = $self->_increment_sequence_no; $self->current_edit->original_sequence_no($sequence) unless ( defined $self->current_edit->original_sequence_no ); $self->current_edit->original_source_uuid( $self->uuid ) unless ( $self->current_edit->original_source_uuid ); $self->current_edit->sequence_no($sequence); for my $record ( @{ $self->current_edit_records } ) { $self->_write_record_index_entry( changeset_id => $sequence, %$record ); } $self->_write_changeset_to_index( $self->current_edit ); } sub _write_changeset_to_index { my $self = shift; my $changeset = shift; $self->_write_changeset( changeset => $changeset ); $self->current_edit(undef); } sub _after_record_changes { my $self = shift; my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); $self->current_edit->is_nullification( $changeset->is_nullification ); $self->current_edit->is_resolution( $changeset->is_resolution ); } sub create_record { my $self = shift; my %args = validate( @_, { uuid => 1, props => 1, type => 1 } ); my $inside_edit = $self->current_edit ? 1 : 0; $self->begin_edit() unless ($inside_edit); $self->_write_serialized_record( type => $args{'type'}, uuid => $args{'uuid'}, props => $args{'props'} ); my $change = Prophet::Change->new( { record_type => $args{'type'}, record_uuid => $args{'uuid'}, change_type => 'add_file' } ); for my $name ( keys %{ $args{props} } ) { $change->add_prop_change( name => $name, old => undef, new => $args{props}->{$name} ); } $self->current_edit->add_change( change => $change ); $self->commit_edit unless ($inside_edit); } sub delete_record { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1 } ); my $inside_edit = $self->current_edit ? 
1 : 0; $self->begin_edit() unless ($inside_edit); my $change = Prophet::Change->new( { record_type => $args{'type'}, record_uuid => $args{'uuid'}, change_type => 'delete' } ); $self->current_edit->add_change( change => $change ); $self->_prepare_record_index_update( uuid => $args{uuid}, type => $args{type}, cas_key => '0' x 40 ); $self->commit_edit() unless ($inside_edit); return 1; } sub set_record_props { my $self = shift; my %args = validate( @_, { uuid => 1, props => 1, type => 1 } ); my $inside_edit = $self->current_edit ? 1 : 0; $self->begin_edit() unless ($inside_edit); my $old_props = $self->get_record_props( uuid => $args{'uuid'}, type => $args{'type'} ); my %new_props = %$old_props; for my $prop ( keys %{ $args{props} } ) { if ( !defined $args{props}->{$prop} ) { delete $new_props{$prop}; } else { $new_props{$prop} = $args{props}->{$prop}; } } $self->_write_serialized_record( type => $args{'type'}, uuid => $args{'uuid'}, props => \%new_props ); my $change = Prophet::Change->new( { record_type => $args{'type'}, record_uuid => $args{'uuid'}, change_type => 'update_file' } ); for my $name ( keys %{ $args{props} } ) { $change->add_prop_change( name => $name, old => $old_props->{$name}, new => $args{props}->{$name} ); } $self->current_edit->add_change( change => $change ); $self->commit_edit() unless ($inside_edit); return 1; } sub get_record_props { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1 } ); return $self->_read_serialized_record( uuid => $args{'uuid'}, type => $args{'type'} ); } sub record_exists { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1 } ); return undef unless $args{'uuid'}; return $self->_record_cas_filename( type => $args{'type'}, uuid => $args{'uuid'} ) ? 1 : 0; } sub list_records { my $self = shift; my %args = validate( @_ => { type => 1, record_class => 1 } ); return [] unless $self->type_exists( type => $args{type} ); #return just the filenames, which, File::Find::Rule doesn't seem capable of my @record_uuids; find sub { return unless -f $_; push @record_uuids, $_ }, File::Spec->catdir( $self->fs_root => $self->_record_type_dir( $args{'type'} )); return [ map { my $record = $args{record_class}->new( { app_handle => $self->app_handle, handle => $self, type => $args{type} } ); $record->_instantiate_from_hash( uuid => $_); $record; } grep { $self->_record_cas_filename( type => $args{'type'}, uuid => $_ ) } @record_uuids ]; } sub list_types { my $self = shift; opendir( my $dh, File::Spec->catdir( $self->fs_root => $self->record_dir ) ) || die "can't open type directory $!"; my @types = grep {$_ !~ /^\./ } readdir($dh); closedir $dh; return \@types; } sub type_exists { my $self = shift; my %args = validate( @_, { type => 1 } ); return $self->_file_exists( $self->_record_type_dir( $args{'type'} ) ); } __PACKAGE__->meta->make_immutable(); no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/prophet_cache.pm000066400000000000000000000156221160607302300221220ustar00rootroot00000000000000package Prophet::Replica::prophet_cache; use Any::Moose; extends 'Prophet::FilesystemReplica'; use Params::Validate ':all'; has '+db_uuid' => ( lazy => 1, default => sub { shift->app_handle->handle->db_uuid() } ); has uuid => ( is => 'rw'); has _replica_version => ( is => 'rw', isa => 'Int', lazy => 1, default => sub { shift->_read_file('replica-version') || 0 } ); has fs_root_parent => ( is => 'rw', lazy => 1, default => sub { my $self = shift; my $path = $self->fs_root; return File::Spec->catdir( ( File::Spec->splitpath($path) )[ 0, -2 ] ); }, ); 
has changeset_cas => ( is => 'rw', isa => 'Prophet::ContentAddressedStore', lazy => 1, default => sub { my $self = shift; Prophet::ContentAddressedStore->new( { fs_root => $self->fs_root, root => $self->changeset_cas_dir } ); }, ); has resdb_replica_uuid => ( is => 'rw', lazy => 1, isa => 'Str', default => sub { my $self = shift; return $self->_read_file( $self->resolution_db_replica_uuid_file ); } ); has '+resolution_db_handle' => ( isa => 'Prophet::Replica | Undef', lazy => 1, weak_ref => 1, default => sub { my $self = shift; return $self if $self->is_resdb ; my $suffix = 'remote_replica_cache'; return Prophet::Replica->get_handle( { url => 'prophet_cache:'.$self->resdb_replica_uuid, fs_root => File::Spec->catdir($self->app_handle->handle->resolution_db_handle->fs_root => $suffix), app_handle => $self->app_handle, db_uuid => $self->app_handle->handle->resolution_db_handle->db_uuid, is_resdb => 1, } ); }, ); use constant userdata_dir => 'userdata'; use constant local_metadata_dir => 'local_metadata'; use constant scheme => 'prophet_cache'; use constant cas_root => 'cas'; use constant changeset_cas_dir => File::Spec->catdir( __PACKAGE__->cas_root => 'changesets' ); has fs_root => ( is => 'rw', lazy => 1, default => sub { my $self = shift; return $self->app_handle->handle->url =~ m{^file://(.*)$} ? File::Spec->catdir( $1, 'remote-replica-cache' ) : undef; }, ); use constant replica_dir => 'replica'; has changeset_index => ( is => 'rw', lazy => 1, default => sub { my $self = shift; File::Spec->catdir($self->replica_dir , $self->uuid, 'changesets.idx'); } ); has resolution_db_replica_uuid_file => ( is => 'rw', lazy => 1, default => sub { my $self = shift; File::Spec->catdir($self->replica_dir , $self->uuid, 'resolution_replica'); } ); use constant can_read_records => 0; use constant can_read_changesets => 1; sub can_write_changesets { return ( shift->fs_root ? 
1 : 0 ) } use constant can_write_records => 0; sub BUILD { my $self = shift; my $args = shift; if ($self->url =~ /^prophet_cache:(.*)$/i) { my $uuid = $1; $self->uuid($uuid); if ($self->is_resdb) { $self->fs_root(File::Spec->catdir($self->app_handle->handle->resolution_db_handle->fs_root => 'remote-replica-cache' )); } else { $self->fs_root(File::Spec->catdir($self->app_handle->handle->fs_root => 'remote-replica-cache' )); } } } sub initialize_from_source { my $self = shift; my ($source) = validate_pos(@_,{isa => 'Prophet::Replica'}); my %init_args = ( db_uuid => $source->db_uuid, replica_uuid => $source->uuid, resdb_uuid => $source->resolution_db_handle->db_uuid, resdb_replica_uuid => $source->resolution_db_handle->uuid, ); $self->initialize(%init_args); # XXX only do this when we need to } sub _on_initialize_create_paths { my $self = shift; return ( $self->cas_root, $self->changeset_cas_dir, $self->replica_dir, File::Spec->catdir( $self->replica_dir, $args{'replica_uuid'} ), $self->userdata_dir ); } sub initialize_backend { my $self = shift; my %args = validate( @_, { db_uuid => 1, replica_uuid => 1, resdb_uuid => 0, resdb_replica_uuid => 0, } ); $self->set_db_uuid( $args{db_uuid} ); $self->set_resdb_replica_uuid( $args{resdb_replica_uuid} ) unless ( $self->is_resdb ); $self->resolution_db_handle->initialize( db_uuid => $args{resdb_uuid}, replica_uuid => $args{resdb_replica_uuid} ) unless ( $self->is_resdb ); } sub set_resdb_replica_uuid { my $self = shift; my $id = shift; $self->_write_file( path => $self->resolution_db_replica_uuid_file , content => scalar($id) ); } sub replica_exists { my $self = shift; if (-e File::Spec->catdir($self->fs_root, $self->changeset_index)) { return 1; } else { return undef; } } sub latest_sequence_no { my $self = shift; my $count = ((-s File::Spec->catdir($self->fs_root => $self->changeset_index )) ||0) / $self->CHG_RECORD_SIZE; return $count; } sub mirror_from { my $self = shift; my %args = validate( @_, { source => 1, reporting_callback => { type => CODEREF, optional => 1 } } ); my $source = $args{source}; if ( $source->can('read_changeset_index') ) { my $content = ${ $source->read_changeset_index } ||''; $self->_write_file( path => $self->changeset_index, content => $content ); $self->traverse_changesets( load_changesets => 0, callback => sub { my %args = (@_); my $data = $args{changeset_metadata}; my ( $seq, $orig_uuid, $orig_seq, $key ) = @{$data}; if ( -e File::Spec->catdir( $self->fs_root, $self->changeset_cas->filename($key) ) ) { return; } my $content = $source->fetch_serialized_changeset(sha1 => $key); my $newkey = $self->changeset_cas->write( $content ); if ($newkey ne $key) { warn "Original key: $key"; warn "New key $newkey"; warn "Original content:\n".$content."\n"; warn "New content:\n".$self->_read_file($self->changeset_cas->filename($newkey))."\n"; Carp::confess "writing a mirrored changeset to the CAS resulted in an inconsistent hash. Corrupted upstream?"; } } , after => 0, $args{reporting_callback} ? 
( reporting_callback => $args{reporting_callback} ) : (), ); } else { warn "Sorry, we only support replicas with a changeset index file"; } } no Any::Moose; 1; prophet-0.750/lib/Prophet/Replica/sqlite.pm000066400000000000000000000656141160607302300206250ustar00rootroot00000000000000package Prophet::Replica::sqlite; use Any::Moose; extends 'Prophet::Replica'; use Params::Validate qw(:all); use File::Spec (); use File::Path; use Prophet::Util; use JSON; use Digest::SHA qw/sha1_hex/; use DBI; =head1 METHODS =cut has dbh => ( is => 'rw', isa => 'DBI::db', lazy => 1, default => sub { my $self = shift; my $dbh; die "I couldn't determine a filesystem root from the given URL.\n" ."Correct syntax is (sqlite:)file:///replica/root .\n" unless $self->db_file; eval { $dbh = DBI->connect( "dbi:SQLite:" . $self->db_file, undef, undef, { RaiseError => 1, AutoCommit => 1 }, ); $dbh->do("PRAGMA synchronous = OFF"); }; if ($@) { die "Unable to open the database file '".$self->db_file ."'. Is this a readable SQLite replica?\n"; } return $dbh; } ); sub db_file { my $self = shift; my $fs_root = $self->fs_root; return defined $fs_root ? "$fs_root/db.sqlite" : undef; } has '+db_uuid' => ( lazy => 1, default => sub { shift->fetch_local_metadata('database-uuid') }, ); has _uuid => ( is => 'rw', ); has _replica_version => ( is => 'rw', isa => 'Int', lazy => 1, default => sub { shift->fetch_local_metadata('replica-version') || 0 } ); has fs_root_parent => ( is => 'rw', lazy => 1, default => sub { my $self = shift; if ( $self->url =~ m{^(?:sqlite:)?file://(.*)} ) { my $path = $1; return File::Spec->catdir( ( File::Spec->splitpath($path) )[ 0, -2 ] ); } } ); has fs_root => ( is => 'rw', lazy => 1, default => sub { my $self = shift; return $self->url =~ m{^(?:sqlite\:)?file://(.*)$} ? $1 : undef; }, ); has current_edit => ( is => 'rw', ); has current_edit_records => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, ); has '+resolution_db_handle' => ( isa => 'Prophet::Replica | Undef', lazy => 1, default => sub { my $self = shift; return if $self->is_resdb ; return Prophet::Replica->get_handle( { url => $self->url . 
'/resolutions', app_handle => $self->app_handle, is_resdb => 1, } ); }, ); our $PROP_CACHE = {}; sub has_cached_prop { my $self = shift; my $prop = shift; # $self->uuid is the replica's uuid return exists $PROP_CACHE->{$self->uuid}->{$prop}; } sub fetch_cached_prop { my $self = shift; my $prop = shift; return $PROP_CACHE->{$self->uuid}->{$prop}; } sub set_cached_prop { my $self = shift; my ($prop, $value) = @_; $PROP_CACHE->{$self->uuid}->{$prop} = $value; } sub delete_cached_prop { my $self = shift; my $prop = shift; delete $PROP_CACHE->{$self->uuid}->{$prop}; } sub clear_prop_cache { my $replica_uuid = shift; delete $PROP_CACHE->{$replica_uuid}; } use constant scheme => 'sqlite'; use constant userdata_dir => 'userdata'; sub BUILD { my $self = shift; my $args = shift; Carp::cluck() unless ( $args->{app_handle} ); for ( $self->{url} ) { #s/^sqlite://; # url-based constructor in ::replica should do better s{/$}{}; } $self->_check_for_upgrades if ($self->replica_exists); } sub _check_for_upgrades { my $self = shift; if ( $self->replica_version && $self->replica_version < 2) { $self->_upgrade_replica_to_v2(); } if ( $self->replica_version && $self->replica_version < 3) { $self->_upgrade_replica_to_v3(); } if ( $self->replica_version && $self->replica_version < 5) { $self->_upgrade_replica_to_v5(); } } sub __fetch_data { my $self = shift; my $table = shift; my $key = shift; $key = lc($key); my $sth = $self->dbh->prepare("SELECT value FROM $table WHERE key = ?"); $sth->execute($key); my $results = $sth->fetchrow_arrayref; return $results?$results->[0] : undef; } sub __store_data { my $self = shift; my %args = validate(@_, { key => 1, value => 1, table => 1}); $args{key} = lc($args{key}); $self->dbh->do( "DELETE FROM $args{table} WHERE key = ?", {}, $args{key} ); $self->dbh->do( "INSERT INTO $args{table} (key,value) VALUES(?,?)", {}, $args{key}, $args{value} ); } sub fetch_local_metadata { my $self = shift; my $key = shift; return $self->__fetch_data( 'local_metadata', $key ); } sub store_local_metadata { my $self = shift; my ($key, $value) = (@_); $self->__store_data( table => 'local_metadata', key => $key, value => $value); } sub _fetch_userdata { my $self = shift; my $key = shift; return $self->__fetch_data( 'userdata', $key ); } sub _store_userdata { my $self = shift; $self->__store_data( table => 'userdata', @_ ); } =head2 replica_exists Returns true if the replica already exists / has been initialized. Returns false otherwise. =cut sub replica_exists { my $self = shift; return defined $self->db_file && -f $self->db_file ? 1 : 0; } =head2 replica_version Returns this replica's version. =cut sub replica_version { die "replica_version is read-only; you want set_replica_version." if @_ > 1; shift->_replica_version } =head2 set_replica_version Sets the replica's version to the given integer. 
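For example (a minimal sketch; C<$replica> is assumed to be an already-initialized handle to this replica):

    $replica->set_replica_version(5);   # persists it via store_local_metadata and returns 5
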
=cut sub set_replica_version { my $self = shift; my $version = shift; $self->_replica_version($version); $self->store_local_metadata( 'replica-version' => $version,); return $version; } sub can_initialize { my $self = shift; if ( $self->fs_root_parent && -w $self->fs_root_parent ) { return 1; } return 0; } use constant can_read_records => 1; use constant can_read_changesets => 1; sub can_write_changesets {1} sub can_write_records {1} sub _on_initialize_create_paths { my $self = shift; # We initialize the root, so we just insert '' here return (''); } sub initialize_backend { my $self = shift; my %args = validate( @_, { db_uuid => 0, resdb_uuid => 0, } ); for ($self->schema) { $self->dbh->do($_) || warn $self->dbh->errstr; } $self->set_db_uuid( $args{'db_uuid'} || $self->uuid_generator->create_str ); $self->set_replica_uuid( $self->uuid_generator->create_str ); $self->set_replica_version(3); $self->resolution_db_handle->initialize( db_uuid => $args{resdb_uuid} ) if !$self->is_resdb; } sub latest_sequence_no { my $self = shift; my $sth = $self->dbh->prepare("SELECT MAX(sequence_no) FROM changesets"); $sth->execute(); return $sth->fetchrow_array || 0; } =head2 uuid Return the replica UUID =cut sub uuid { my $self = shift; $self->_uuid( $self->fetch_local_metadata('replica-uuid') ) unless $self->_uuid; return $self->_uuid; } sub set_replica_uuid { my $self = shift; my $uuid = shift; $self->store_local_metadata( 'replica-uuid' => $uuid); } sub set_db_uuid { my $self = shift; my $uuid = shift; $self->store_local_metadata( 'database-uuid', => $uuid); $self->SUPER::set_db_uuid($uuid); }; =head1 Internals of record handling =cut sub _write_record { my $self = shift; my %args = validate( @_, { record => { isa => 'Prophet::Record' }, } ); my $record = $args{'record'}; $self->_write_record_to_db( type => $record->type, uuid => $record->uuid, props => $record->get_props, ); } sub _write_record_to_db { my $self = shift; my %args = validate( @_, { type => 1, uuid => 1, props => 1 } ); for ( keys %{ $args{'props'} } ) { delete $args{'props'}->{$_} if ( !defined $args{'props'}->{$_} || $args{'props'}->{$_} eq '' ); } if ($self->record_exists( uuid => $args{uuid}, type => $args{type} ) ) { $self->_delete_record_props_from_db( uuid => $args{uuid} ) } else { $self->dbh->do( "INSERT INTO records (type, uuid) VALUES (?,?)", {}, $args{type}, $args{uuid} ); } $self->dbh->do( "INSERT INTO record_props (uuid, prop, value) VALUES (?,?,?)", {}, $args{uuid}, $_, $args{props}->{$_} ) for ( keys %{ $args{props} } ); } sub _delete_record_from_db { my $self = shift; my %args = validate( @_, { uuid => 1 } ); $self->dbh->do("DELETE FROM records where uuid = ?", {},$args{uuid}); $self->_delete_record_props_from_db(%args); } sub _delete_record_props_from_db { my $self = shift; my %args = validate( @_, { uuid => 1 } ); $self->dbh->do("DELETE FROM record_props where uuid = ?", {}, $args{uuid}); $self->delete_cached_prop( $args{uuid} ); } =head2 traverse_changesets { after => SEQUENCE_NO, UNTIL => SEQUENCE_NO, callback => sub { } } Walks through all changesets from $after to $until, calling $callback on each. If no $until is specified, the latest changeset is assumed. 
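For example, a minimal sketch (assuming C<$replica> is an initialized handle) that walks every changeset and prints its sequence number:

    $replica->traverse_changesets(
        after    => 0,
        callback => sub {
            my %args = @_;
            my $changeset = $args{changeset};    # a Prophet::ChangeSet object
            print $changeset->sequence_no, "\n";
        },
    );

Passing C<< load_changesets => 0 >> hands the callback C<changeset_metadata> (an array reference of sequence number, original source uuid, original sequence number and sha1) instead of an inflated changeset object.
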
=cut sub traverse_changesets { my $self = shift; my %args = validate( @_, { after => 1, callback => 1, until => 0, reverse => 0, before_load_changeset_callback => { type => CODEREF, optional => 1}, reporting_callback => { type => CODEREF, optional => 1 }, load_changesets => { default => 1 } } ); my $first_rev = ( $args{'after'} + 1 ) || 1; my $latest = $self->latest_sequence_no; if ( defined $args{until} && $args{until} < $latest ) { $latest = $args{until}; } $self->log_debug("Traversing changesets between $first_rev and $latest"); my @range = ( $first_rev .. $latest ); @range = reverse @range if $args{reverse}; for my $rev (@range) { if ( $args{'before_load_changeset_callback'} ) { my $continue = $args{'before_load_changeset_callback'}->( changeset_metadata => $self->_changeset_index_entry( sequence_no => $rev, ) ); } $self->log_debug("Fetching changeset $rev"); my $data; if ( $args{load_changesets} ) { $data = $self->_load_changeset_from_db( sequence_no => $rev ); $args{callback}->( changeset =>$data); } else { $data = $self->_changeset_index_entry( sequence_no => $rev); $args{callback}->(changeset_metadata => $data); } $args{reporting_callback}->($data) if ($args{reporting_callback}); } } sub _changeset_index_entry { my $self = shift; my %args = ( sequence_no => undef, @_ ); my $row = $self->_load_changeset_metadata_from_db( sequence_no => $args{sequence_no} ); my $data = [ $row->{sequence_no}, $row->{original_source_uuid}, $row->{original_sequence_no}, $row->{sha1} ]; return $data; } sub read_changeset_index { my $self =shift; my $index = ''; $self->traverse_changesets( after=> 0, load_changesets => 0, callback => sub { my %args = (@_); my $data = $args{changeset_metadata}; my $changeset_index_line = pack( 'Na16NH40', $data->[0], $self->uuid_generator->from_string( $data->[1]), $data->[2], $data->[3]); $index .= $changeset_index_line; } ); return \$index; } =head2 changesets_for_record { uuid => $uuid, type => $type, limit => $int } Returns an ordered set of changeset objects for all changesets containing changes to this object. If "limit" is specified, only returns that many changesets (starting from record creation). Note that changesets may include changes to other records =cut sub changesets_for_record { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1, limit => 0 } ); my $statement = "SELECT DISTINCT changesets.* " . "FROM changes, changesets " . "WHERE changesets.sequence_no = changes.changeset " . 
"AND changes.record = ?"; if (defined $args{limit}) { $statement .= " ORDER BY changesets.sequence_no LIMIT ".$args{limit}; } my $sth = $self->dbh->prepare( $statement ); require Prophet::ChangeSet; $sth->execute( $args{uuid} ); my @changesets; while ( my $cs = $sth->fetchrow_hashref() ) { push @changesets, $self->_instantiate_changeset_from_db($cs); } return @changesets; } sub fetch_serialized_changeset { my $self = shift; my %args = validate(@_, { sha1 => 1 }); my $cs = $self->_load_changeset_from_db(sha1 => $args{sha1}); return $cs->canonical_json_representation; } sub _load_changeset_from_db { my $self = shift; my %args = validate( @_, { sequence_no => 0, sha1 => 0 } ); my $data = $self->_load_changeset_metadata_from_db(%args); return $self->_instantiate_changeset_from_db($data); } sub _load_changeset_metadata_from_db { my $self = shift; my %args = validate( @_, { sequence_no => 0, sha1 => 0 } ); my ( $attr, @bind ); if ( $args{sequence_no} ) { $attr = 'sequence_no'; @bind = ( $args{sequence_no} ); } elsif ( $args{sha1} ) { $attr = 'sha1'; @bind = ( $args{sha1} ); } else { die "$self->_load_changeset_from_db called with neither a sequence_no nor a sha1"; } my $sth = $self->dbh->prepare( "SELECT creator, created, sequence_no, " . "original_source_uuid, original_sequence_no, " . "is_nullification, is_resolution, sha1 from changesets " . "WHERE $attr = ?" ); $sth->execute(@bind); my $data = $sth->fetchrow_hashref; } sub _instantiate_changeset_from_db { my $self = shift; my $data = shift; require Prophet::ChangeSet; my $changeset = Prophet::ChangeSet->new(%$data, source_uuid => $self->uuid ); my $sth = $self->dbh->prepare("SELECT id, record, change_type, record_type from changes WHERE changeset = ?"); $sth->execute($changeset->sequence_no); while (my $row = $sth->fetchrow_hashref) { my $change_id = delete $row->{id}; my $record_type = delete $row->{record_type}; my $change = Prophet::Change->new( record_uuid => $row->{record}, change_type => $row->{change_type}, record_type => $record_type ); my $propchange_sth = $self->dbh->prepare("SELECT name, old_value, new_value FROM prop_changes WHERE change = ?"); $propchange_sth->execute($change_id); while (my $pc = $propchange_sth->fetchrow_hashref) { $change->add_prop_change( name => $pc->{name}, old => $pc->{old_value}, new => $pc->{new_value}); } push @{$changeset->changes}, $change; } if(!$data->{sha1}) { my $sha1 = $changeset->calculate_sha1(); my $update_sth = $self->dbh->prepare('UPDATE changesets set sha1 = ? where sequence_no = ?'); $update_sth->execute($sha1, $changeset->sequence_no); $changeset->sha1($sha1); } return $changeset; } sub begin_edit { my $self = shift; my %args = validate( @_, { source => 0, # the changeset that we're replaying, if applicable }); my $source = $args{source}; my $creator = $source ? $source->creator : $self->changeset_creator; my $created = $source && $source->created; require Prophet::ChangeSet; my $changeset = Prophet::ChangeSet->new( { source_uuid => $self->uuid, creator => $creator, $created ? 
( created => $created ) : (), }); $self->current_edit($changeset); $self->current_edit_records( [] ); $self->dbh->begin_work; } sub _set_original_source_metadata_for_current_edit { my $self = shift; my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); $self->current_edit->original_source_uuid( $changeset->original_source_uuid ); $self->current_edit->original_sequence_no( $changeset->original_sequence_no ); } sub commit_edit { my $self = shift; $self->current_edit->original_source_uuid( $self->uuid ) unless ( $self->current_edit->original_source_uuid ); my $local_id = $self->_write_changeset_to_db($self->current_edit); # XXX TODO SET original_sequence_no $self->dbh->commit; $self->current_edit(undef); } sub _write_changeset_to_db { my $self = shift; my $changeset = shift; my $sha1 = $changeset->calculate_sha1(); $self->dbh->do( "INSERT INTO changesets " . "(creator, created," . "original_source_uuid, original_sequence_no, " . "is_nullification, is_resolution, sha1) " . "VALUES(?,?,?,?,?,?,?)", {}, $changeset->creator, $changeset->created, $changeset->original_source_uuid, $changeset->original_sequence_no, $changeset->is_nullification, $changeset->is_resolution, $sha1 ); my $local_id = $self->dbh->last_insert_id(undef, undef, 'changesets', 'sequence_no'); $self->dbh->do( "UPDATE changesets set original_sequence_no = sequence_no WHERE sequence_no = ?", {}, $local_id ) unless defined $changeset->original_sequence_no; for my $change (@{$changeset->changes}) { $self->_write_change_to_db($change, $local_id); } return $local_id; } sub _write_change_to_db { my $self = shift; my $change = shift; my $changeset_id = shift; $self->dbh->do( "INSERT INTO changes (record, changeset, change_type, record_type) VALUES (?,?,?,?)", {}, $change->record_uuid, $changeset_id, $change->change_type, $change->record_type ); my $change_id = $self->dbh->last_insert_id(undef, undef, 'changes', 'id'); for my $pc (@{$change->prop_changes}) { $self->_write_prop_change_to_db($change_id, $pc); } } sub _write_prop_change_to_db { my $self = shift; my $change = shift; my $pc = shift; $self->dbh->do("INSERT INTO prop_changes (change, name, old_value, new_value) VALUES (?,?,?,?)", {}, $change, $pc->name, $pc->old_value, $pc->new_value); } sub _after_record_changes { my $self = shift; my ($changeset) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); $self->current_edit->is_nullification( $changeset->is_nullification ); $self->current_edit->is_resolution( $changeset->is_resolution ); } sub create_record { my $self = shift; my %args = validate( @_, { uuid => 1, props => 1, type => 1 } ); my $inside_edit = $self->current_edit ? 1 : 0; $self->begin_edit() unless ($inside_edit); $self->_write_record_to_db( type => $args{'type'}, uuid => $args{'uuid'}, props => $args{'props'}); my $change = Prophet::Change->new( { record_type => $args{'type'}, record_uuid => $args{'uuid'}, change_type => 'add_file' }); $change->add_prop_change( name => $_, old => undef, new => $args{props}->{$_}) for (keys %{$args{props}}); $self->current_edit->add_change( change => $change ); $self->commit_edit unless ($inside_edit); } sub delete_record { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1 } ); my $inside_edit = $self->current_edit ? 
1 : 0; $self->begin_edit() unless ($inside_edit); $self->_delete_record_from_db(uuid => $args{uuid}); my $change = Prophet::Change->new( { record_type => $args{'type'}, record_uuid => $args{'uuid'}, change_type => 'delete' }); $self->current_edit->add_change( change => $change ); $self->commit_edit() unless ($inside_edit); return 1; } sub set_record_props { my $self = shift; my %args = validate( @_, { uuid => 1, props => 1, type => 1 } ); my $inside_edit = $self->current_edit ? 1 : 0; $self->begin_edit() unless ($inside_edit); # clear the cache before computing the diffs. this is probably paranoid $self->delete_cached_prop( $args{uuid} ); my $old_props = $self->get_record_props( uuid => $args{'uuid'}, type => $args{'type'}); my %new_props = %$old_props; for my $prop ( keys %{ $args{props} } ) { if ( !defined $args{props}->{$prop} ) { delete $new_props{$prop}; } else { $new_props{$prop} = $args{props}->{$prop}; } } $self->_write_record_to_db( type => $args{'type'}, uuid => $args{'uuid'}, props => \%new_props); # Clear the cache now that we've actually written out changed props $self->delete_cached_prop( $args{uuid} ); my $change = Prophet::Change->new( { record_type => $args{'type'}, record_uuid => $args{'uuid'}, change_type => 'update_file' }); $change->add_prop_change( name => $_, old => $old_props->{$_}, new => $args{props}->{$_}) for (keys %{$args{props}}); $self->current_edit->add_change( change => $change ); $self->commit_edit() unless ($inside_edit); return 1; } sub get_record_props { my $self = shift; my %args = ( uuid => undef, type => undef, @_ ) ; # validate is slooow validate( @_, { uuid => 1, type => 1 } ); unless ( $self->has_cached_prop( $args{uuid} ) ) { my $sth = $self->dbh->prepare("SELECT prop, value from record_props WHERE uuid = ?"); $sth->execute( $args{uuid} ); my $items = $sth->fetchall_arrayref; $self->set_cached_prop( $args{uuid}, { map {@$_} @$items } ); } return $self->fetch_cached_prop( $args{uuid} ); } sub record_exists { my $self = shift; my %args = validate( @_, { uuid => 1, type => 1 } ); return undef unless $args{'uuid'}; my $sth = $self->dbh->prepare("SELECT luid from records WHERE type = ? AND uuid = ?"); $sth->execute($args{type}, $args{uuid}); return $sth->fetchrow_array; } =head2 list_records { type => $type } Returns a reference to a list of record objects for all records of type $type. Order is not guaranteed. =cut sub list_records { my $self = shift; my %args = validate( @_ => { type => 1, record_class => 1 } ); my @data; my $sth = $self->dbh->prepare("SELECT records.uuid, records.luid, record_props.prop, record_props.value ". "FROM records, record_props ". "WHERE records.uuid = record_props.uuid AND records.type = ?"); $sth->execute($args{type}); my %found; for (@{$sth->fetchall_arrayref}) { $found{$_->[0]}->{luid} = $_->[1]; $found{$_->[0]}->{props}->{$_->[2]} = $_->[3]; } for my $uuid (keys %found) { my $record = $args{record_class}->new( { app_handle => $self->app_handle, handle => $self, type => $args{type} } ); $record->_instantiate_from_hash( uuid => $uuid, luid => $found{$uuid}->{luid}); #$self->prop_cache->{$uuid} = $found{$uuid}->{props}; push @data, $record; } return \@data; } sub list_types { my $self = shift; my $sth = $self->dbh->prepare("SELECT DISTINCT type from records"); $sth->execute(); return [ map { $_->[0]} @{$sth->fetchall_arrayref}]; } sub type_exists { my $self = shift; my %args = (type =>undef, @_); my $sth = $self->dbh->prepare("SELECT type from records WHERE type = ? 
LIMIT 1"); $sth->execute($args{type}); return $sth->fetchrow_array; } =head2 read_userdata_file Returns the contents of the given file in this replica's userdata directory. Returns C if the file does not exist. =cut sub read_userdata { my $self = shift; my %args = validate( @_, { path => 1 } ); return $self->_fetch_userdata( $args{path} ); } =head2 write_userdata_file Writes the given string to the given file in this replica's userdata directory. =cut sub write_userdata { my $self = shift; my %args = validate( @_, { path => 1, content => 1 } ); $self->_store_userdata( key => $args{path}, value => $args{content}, ); } =head1 Working with luids =cut sub find_or_create_luid { my $self = shift; my %args = (uuid => undef, type => undef, @_); # validate is slooow validate( @_, { uuid => 1, type => 1 } ); return undef unless $args{'uuid'}; my $sth = $self->dbh->prepare("SELECT luid from records WHERE uuid = ?"); $sth->execute( $args{uuid}); return $sth->fetchrow_array; } sub find_luid_by_uuid { my $self = shift; my %args = validate( @_, { uuid => 1 } ); my $sth = $self->dbh->prepare("SELECT luid from records WHERE uuid = ?"); $sth->execute( $args{uuid}); return $sth->fetchrow_array; } sub find_uuid_by_luid { my $self = shift; my %args = validate( @_, { luid => 1 } ); return undef unless $args{'luid'}; my $sth = $self->dbh->prepare("SELECT uuid from records WHERE luid = ?"); $sth->execute( $args{luid}); return $sth->fetchrow_array; } sub schema { my $self = shift; return ( q{ CREATE TABLE records ( luid INTEGER PRIMARY KEY AUTOINCREMENT, uuid text, type text ) }, q{ CREATE TABLE record_props ( uuid text, prop text, value text ) }, q{ CREATE TABLE changesets ( sequence_no INTEGER PRIMARY KEY AUTOINCREMENT, creator text, created text, is_nullification boolean, is_resolution boolean, original_source_uuid text, original_sequence_no INTEGER, sha1 TEXT ) }, q{ CREATE TABLE changes ( id INTEGER PRIMARY KEY AUTOINCREMENT, record text, changeset integer, change_type text, record_type text ) }, q{ CREATE TABLE prop_changes ( change integer, name text, old_value text, new_value text ) }, q{ CREATE TABLE local_metadata ( key text, value text ) }, q{ CREATE TABLE userdata ( key text, value text ) }, q{create index uuid_idx on record_props(uuid)}, q{create index typeuuuid on records(type, uuid)}, q{create index keyidx on userdata(key)} ); } sub _upgrade_replica_to_v2 { my $self = shift; $self->_do_db_upgrades( statements => [ q{CREATE TABLE new_records (luid INTEGER PRIMARY KEY, uuid TEXT, type TEXT)}, q{INSERT INTO new_records (uuid, type) SELECT uuid, type FROM records}, q{DROP TABLE records}, q{ALTER TABLE new_records RENAME TO records} ], version => 2 ); } sub _upgrade_replica_to_v3 { my $self = shift; $self->_do_db_upgrades( statements => [ q{ALTER TABLE changesets ADD COLUMN sha1 text} ], version => 3 ); } sub _upgrade_replica_to_v5 { my $self = shift; $self->_do_db_upgrades( statements => [ q{UPDATE local_metadata SET key = lower(key)} ], version => 5 ); } sub _do_db_upgrades { my $self = shift; my %args = ( statements => undef, version => undef, @_ ); $self->dbh->begin_work; foreach my $s ( @{ $args{statements} } ) { $self->dbh->do($s) || warn $self->dbh->errstr; } $self->set_replica_version( $args{version} ); $self->dbh->commit; } sub DEMOLISH { my $self = shift; $self->dbh->disconnect if ( $self->replica_exists and $self->dbh ); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; 
prophet-0.750/lib/Prophet/ReplicaExporter.pm000066400000000000000000000062351160607302300210470ustar00rootroot00000000000000package Prophet::ReplicaExporter; use Any::Moose; use Params::Validate qw(:all); use File::Spec; use Prophet::Record; use Prophet::Collection; has source_replica => ( is => 'rw', isa => 'Prophet::Replica', ); has target_path => ( is => 'rw', isa => 'Str', predicate => 'has_target_path', ); has target_replica => ( is => 'rw', isa => 'Prophet::Replica', lazy => 1, default => sub { my $self = shift; confess "No target_path specified" unless $self->has_target_path; my $replica = Prophet::Replica->get_handle(url => "prophet:file://" . $self->target_path, app_handle => $self->app_handle); my $src = $self->source_replica; my %init_args = ( db_uuid => $src->db_uuid, ); $init_args{resdb_uuid} = $src->resolution_db_handle->db_uuid if !$src->is_resdb; $replica->initialize(%init_args); return $replica; }, ); has app_handle => ( is => 'ro', isa => 'Prophet::App', weak_ref => 1, predicate => 'has_app_handle', ); =head1 NAME Prophet::ReplicaExporter =head1 DESCRIPTION A utility class which exports a replica to a serialized on-disk format =cut =head1 METHODS =head2 new Instantiates a new replica exporter object =cut =head2 export This routine will export a copy of this prophet database replica to a flat file on disk suitable for publishing via HTTP or over a local filesystem for other Prophet replicas to clone or incorporate changes from. =cut sub export { my $self = shift; $self->init_export_metadata(); print " Exporting records...\n"; $self->export_all_records(); print " Exporting changesets...\n"; $self->export_changesets(); unless ($self->source_replica->is_resdb) { my $resolutions = Prophet::ReplicaExporter->new( target_path => File::Spec->catdir($self->target_path, 'resolutions' ), source_replica => $self->source_replica->resolution_db_handle, app_handle => $self->app_handle ); print "Exporting resolution database\n"; $resolutions->export(); } } sub init_export_metadata { my $self = shift; $self->target_replica->set_latest_sequence_no( $self->source_replica->latest_sequence_no ); $self->target_replica->set_replica_uuid( $self->source_replica->uuid ); } sub export_all_records { my $self = shift; $self->export_records( type => $_ ) for ( @{ $self->source_replica->list_types } ); } sub export_records { my $self = shift; my %args = validate( @_, { type => 1 } ); my $collection = Prophet::Collection->new( app_handle => $self->app_handle, handle => $self->source_replica, type => $args{type} ); $collection->matching( sub {1} ); $self->target_replica->_write_record( record => $_ ) for @$collection; } sub export_changesets { my $self = shift; for my $changeset ( @{ $self->source_replica->fetch_changesets( after => 0 ) } ) { $self->target_replica->_write_changeset( changeset => $changeset ); } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/ReplicaFeedExporter.pm000066400000000000000000000134101160607302300216240ustar00rootroot00000000000000package Prophet::ReplicaFeedExporter; use Any::Moose; use IO::Handle; extends 'Prophet::ReplicaExporter'; has output_handle => ( is => 'rw', lazy => 1, default => sub { my $self = shift; if ($self->has_target_path) { open(my $outs, ">", $self->target_path) || die $!; $outs->autoflush(1); return $outs; } else { return ; } } ); my $feed_updated; sub output { my $self = shift; my $content = shift; $self->output_handle->print( $content); } sub export { my $self = shift; $self->output( $self->feed_header()); 
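    # Emit entries for the resolution database's changesets first, then the
    # main replica's changesets, then close out the feed with its 'updated'
    # element.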
$self->source_replica->resolution_db_handle->traverse_changesets( after => 0, callback => sub { my %args = shift; $self->output( $self->format_resolution_changeset($args{changeset})); } ); $self->source_replica->traverse_changesets( after => 0, callback => sub { my %args = (@_); $self->output( $self->format_changeset($args{changeset})); } ); $self->output( tag( 'updated', $feed_updated )); $self->output( ""); } sub feed_header { my $self = shift; return join( "\n", '', '', tag( 'id' => 'urn:uuid:' . $self->source_replica->uuid ), tag( 'title' => 'Prophet feed of ' . $self->source_replica->db_uuid ), tag( 'prophet:latest-sequence', $self->source_replica->latest_sequence_no ) ); } sub format_resolution_changeset { my $self = shift; my $cs = shift; $feed_updated = $cs->created_as_rfc3339; return tag( 'entry', undef, sub { my $output = tag( author => undef, sub { tag( name => $cs->creator ) } ) . tag(title => 'Resolution ' . $cs->sequence_no . ' by ' . ( $cs->creator || 'nobody' ) . ' @ ' . $cs->original_source_uuid ) . tag( id => 'prophet:' . $cs->original_sequence_no . '@' . $cs->original_source_uuid ) . tag( published => $cs->created_as_rfc3339 ) . tag( updated => $cs->created_as_rfc3339 ) . '' . "\n" . tag('prophet:resolution') . tag( 'prophet:sequence' => $cs->sequence_no ) . output_changes($cs) . ""."\n"; return $output; } ); } sub format_changeset { my $self = shift; my $cs = shift; $feed_updated = $cs->created_as_rfc3339; return tag( 'entry', undef, sub { my $output = tag( author => undef, sub { tag( name => $cs->creator ) } ) . tag(title => 'Change ' . $cs->sequence_no . ' by ' . ( $cs->creator || 'nobody' ) . ' @ ' . $cs->original_source_uuid ) . tag( id => 'prophet:' . $cs->original_sequence_no . '@' . $cs->original_source_uuid ) . tag( published => $cs->created_as_rfc3339 ) . tag( updated => $cs->created_as_rfc3339 ) . '' . "\n" . tag( 'prophet:sequence' => $cs->sequence_no ) . ( $cs->is_nullification ? tag( 'prophet:nullifcation' => $cs->is_nullification ) : '' ) . ( $cs->is_resolution ? tag( 'prophet:resolution' => $cs->is_resolution ) : '' ) . output_changes($cs) . ''; return $output; } ); } sub output_changes { my $cs = shift; my $output = ''; foreach my $change ( $cs->changes ) { $output .= tag( 'prophet:change', undef, sub { my $change_data = tag( 'prophet:type', $change->record_type ) . tag( 'prophet:uuid', $change->record_uuid ) . tag( 'prophet:change-type', $change->change_type ) . ( $change->is_resolution ? tag('prophet:resolution') : '' ) . ( $change->resolution_cas ? tag( 'prophet:resolution-fingerprint', $change->resolution_cas ) : '' ); foreach my $prop_change ( $change->prop_changes ) { $change_data .= tag( 'prophet:property', undef, sub { tag( 'prophet:name' => $prop_change->name ) . tag( 'prophet:old' => $prop_change->old_value ) . tag( 'prophet:new' => $prop_change->new_value ); } ); } return $change_data; } ); return $output; } return $output; } my $depth = 0; sub tag ($$;&) { my $tag = shift; my $value = shift; my $content = shift; my $output; $depth++; $output .= " " x $depth; if ( !$content && !defined $value ) { $output .= "<$tag/>\n"; } else { $output .= "<$tag>"; if ($value) { Prophet::Util::escape_utf8( \$value ); $output .= $value; } if ($content) { $output .= "\n"; $output .= $content->(); $output .= " " x $depth; } $output .= "" . 
"\n"; } $depth--; return $output; } __PACKAGE__->meta->make_immutable; no Any::Moose; prophet-0.750/lib/Prophet/Resolver.pm000066400000000000000000000001431160607302300175300ustar00rootroot00000000000000package Prophet::Resolver; use Any::Moose; __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/000077500000000000000000000000001160607302300171745ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Resolver/AlwaysSource.pm000066400000000000000000000006251160607302300221560ustar00rootroot00000000000000package Prophet::Resolver::AlwaysSource; use Any::Moose; use Prophet::Change; extends 'Prophet::Resolver'; sub run { my $self = shift; my $conflicting_change = shift; return 0 if $conflicting_change->file_op_conflict; my $resolution = Prophet::Change->new_from_conflict($conflicting_change); return $resolution; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/AlwaysTarget.pm000066400000000000000000000021001160607302300221320ustar00rootroot00000000000000package Prophet::Resolver::AlwaysTarget; use Any::Moose; use Data::Dumper; extends 'Prophet::Resolver'; sub run { my $self = shift; my $conflicting_change = shift; my $conflict = shift; my $resolution = Prophet::Change->new_from_conflict($conflicting_change); my $file_op_conflict = $conflicting_change->file_op_conflict || ''; if ( $file_op_conflict eq 'update_missing_file' ) { $resolution->change_type('delete'); return $resolution; } elsif ( $file_op_conflict eq 'delete_missing_file' ) { return $resolution; } elsif ( $file_op_conflict ) { die "Unknown file_op_conflict $file_op_conflict: " . Dumper($conflict,$conflicting_change); } for my $prop_change ( @{ $conflicting_change->prop_conflicts } ) { $resolution->add_prop_change( name => $prop_change->name, old => $prop_change->source_new_value, new => $prop_change->target_value ); } return $resolution; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/Failed.pm000066400000000000000000000006371160607302300207240ustar00rootroot00000000000000package Prophet::Resolver::Failed; use Any::Moose; use Data::Dumper; extends 'Prophet::Resolver'; $Data::Dumper::Indent = 1; sub run { my $self = shift; my $conflicting_change = shift; my $conflict = shift; die "The conflict was not resolved! Sorry dude." . 
Dumper($conflict, $conflicting_change); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/Fixup/000077500000000000000000000000001160607302300202675ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Resolver/Fixup/MissingSourceOldValues.pm000066400000000000000000000014121160607302300252340ustar00rootroot00000000000000package Prophet::Resolver::Fixup::MissingSourceOldValues; use Any::Moose; extends 'Prophet::Resolver'; sub run { my $self = shift; my $conflicting_change = shift; return 0 if $conflicting_change->file_op_conflict; my $resolution = Prophet::Change->new_from_conflict($conflicting_change); for my $prop_conflict ( @{ $conflicting_change->prop_conflicts } ) { if ( defined $prop_conflict->source_old_value && $prop_conflict->source_old_value ne '' ) { return 0; } #$resolution->add_prop_change( name => $prop_conflict->name, old => $prop_conflict->target_value, new => $prop_conflict->source_new_value); } return $resolution; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/FromResolutionDB.pm000066400000000000000000000027351160607302300227360ustar00rootroot00000000000000package Prophet::Resolver::FromResolutionDB; use Any::Moose; use Prophet::Change; use Prophet::Collection; use JSON; use Digest::SHA 'sha1_hex'; extends 'Prophet::Resolver'; sub run { my $self = shift; my $conflicting_change = shift; my $conflict = shift; my $resdb = shift; # XXX: we want diffrent collection actually now require Prophet::Collection; my $res = Prophet::Collection->new( handle => $resdb, # XXX TODO PULL THIS TYPE FROM A CONSTANT type => '_prophet_resolution-' . $conflicting_change->fingerprint ); $res->matching( sub {1} ); return unless $res->count; my %answer_map; my %answer_count; for my $answer ($res->items) { my $key = sha1_hex( to_json($answer->get_props, {utf8 => 1, pretty => 1, canonical => 1})); $answer_map{$key} ||= $answer; $answer_count{$key}++; } my $best = ( sort { $answer_count{$b} <=> $answer_count{$a} } keys %answer_map )[0]; my $answer = $answer_map{$best}; my $resolution = Prophet::Change->new_from_conflict($conflicting_change); for my $prop_conflict ( @{ $conflicting_change->prop_conflicts } ) { $resolution->add_prop_change( name => $prop_conflict->name, old => $prop_conflict->source_old_value, new => $answer->prop( $prop_conflict->name ), ); } return $resolution; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/IdenticalChanges.pm000066400000000000000000000036101160607302300227170ustar00rootroot00000000000000package Prophet::Resolver::IdenticalChanges; use Any::Moose; use Params::Validate qw(:all); use Prophet::Change; extends 'Prophet::Resolver'; =head1 METHODS =head2 attempt_automatic_conflict_resolution Given a L which can not be cleanly applied to a replica, it is sometimes possible to automatically determine a sane resolution to the conflict. =over =item * When the new-state of the conflicting change matches the previous head of the replica. =item * When someone else has previously done the resolution and we have a copy of that hanging around. =back In those cases, this routine will generate a L which resolves as many conflicts as possible. It will then update the conclicting changes to mark which Ls and L have been automatically resolved. 
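The C<run> method below implements this. A rough sketch of its calling convention (illustrative only; the conflict-resolution machinery normally drives this for you):

    my $resolution = Prophet::Resolver::IdenticalChanges->new->run(
        $conflicting_change,    # a Prophet::ConflictingChange
        $conflict,              # the enclosing Prophet::Conflict
    );
    # returns a Prophet::Change on success, or 0 if it cannot auto-resolve
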
=cut sub run { my $self = shift; my ( $conflicting_change, $conflict, $resdb ) = validate_pos( @_, { isa => 'Prophet::ConflictingChange' }, { isa => 'Prophet::Conflict' }, 0 ); # for everything from the changeset that is the same as the old value of the target replica # we can skip applying return 0 if $conflicting_change->file_op_conflict; my $resolution = Prophet::Change->new_from_conflict($conflicting_change); for my $prop_change ( @{ $conflicting_change->prop_conflicts } ) { next if ((!defined $prop_change->target_value || $prop_change->target_value eq '') && ( !defined $prop_change->source_new_value || $prop_change->source_new_value eq '')); next if (defined $prop_change->target_value and defined $prop_change->source_new_value and ( $prop_change->target_value eq $prop_change->source_new_value)); return 0; } $conflict->autoresolved(1); return $resolution; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Resolver/Prompt.pm000066400000000000000000000035661160607302300210250ustar00rootroot00000000000000package Prophet::Resolver::Prompt; use Any::Moose; extends 'Prophet::Resolver'; sub run { my $self = shift; my $conflicting_change = shift; return 0 if $conflicting_change->file_op_conflict; my $resolution = Prophet::Change->new_from_conflict($conflicting_change); print "Oh no! There's a conflict between this replica and the one you're syncing from:\n"; print $conflicting_change->record_type . " " . $conflicting_change->record_uuid . "\n"; for my $prop_conflict ( @{ $conflicting_change->prop_conflicts } ) { print $prop_conflict->name . ": \n"; my %values; for (qw/target_value source_old_value source_new_value/) { $values{$_} = $prop_conflict->$_; $values{$_} = "(undefined)" if !defined($values{$_}); } print "(T)ARGET $values{target_value}\n"; print "SOURCE (O)LD $values{source_old_value}\n"; print "SOURCE (N)EW $values{source_new_value}\n"; while ( my $choice = lc( substr( || 'T', 0, 1 ) ) ) { if ( $choice eq 't' ) { $resolution->add_prop_change( name => $prop_conflict->name, old => $prop_conflict->source_new_value, new => $prop_conflict->target_value ); last; } elsif ( $choice eq 'o' ) { $resolution->add_prop_change( name => $prop_conflict->name, old => $prop_conflict->source_new_value, new => $prop_conflict->source_old_value ); last; } elsif ( $choice eq 'n' ) { last; } else { print "(T), (O) or (N)? "; } } } return $resolution; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Server.pm000066400000000000000000000337231160607302300172070ustar00rootroot00000000000000package Prophet::Server; use Any::Moose; # this instead of extends silences "You inherit from non-Mouse class" warning use base 'HTTP::Server::Simple::CGI'; use Prophet::Server::Controller; use Prophet::Server::View; use Prophet::Server::Dispatcher; use Prophet::Server::Controller; use Prophet::Web::Menu; use Prophet::Web::Result; use Params::Validate qw/:all/; use File::Spec (); use Cwd (); use JSON; use HTTP::Date; # Only define a class type constraint for CGI if it's not already defined, # because Moose doesn't auto-define class type constraints while Mouse does. 
unless (find_type_constraint('CGI')) { use Any::Moose '::Util::TypeConstraints'; class_type('CGI'); } has app_handle => ( isa => 'Prophet::App', is => 'rw', handles => [qw/handle/] ); has cgi => ( isa => 'CGI|Undef', is => 'rw' ); has page_nav => ( isa => 'Prophet::Web::Menu|Undef', is => 'rw' ); has read_only => ( isa => 'Bool', is => 'rw' ); has static => ( isa => 'Bool', is => 'rw'); has view_class => ( isa => 'Str', is => 'rw' ); has result => ( isa => 'Prophet::Web::Result', is => 'rw' ); has port => ( isa => 'Str', is => 'rw', default => sub { my $self = shift; return $self->app_handle->config->get( key => 'server.default-port' ) || '8008'; } ); sub run { my $self = shift; my $publisher = eval { require Net::Rendezvous::Publish; Net::Rendezvous::Publish->new; }; eval { require Template::Declare } || die "Without Template::Declare installed, Prophet's Web UI won't work"; eval { require File::ShareDir } || die "Without File::ShareDir installed, Prophet's Web UI won't work"; if ($publisher) { $publisher->publish( name => $self->database_bonjour_name, type => '_prophet._tcp', port => $self->port, domain => 'local', ); } else { $self->app_handle->log( "Publisher backend is not available. Install one of the " . "Net::Rendezvous::Publish::Backend modules from CPAN."); } $self->setup_template_roots(); print ref($self) . ": Starting up local server. You can stop the server with Ctrl-c.\n"; eval { $self->SUPER::run(@_); }; if ($@) { if ( $@ =~ m/^bind to \*:(\d+): Address already in use/ ) { die "\nPort $1 is already in use! Start the server on a different port using --port.\n"; } else { die "\nError while starting server:\n\n$@\n"; } } } =head2 database_bonjour_name Returns the name this database should use to announce itself via bonjour =cut sub database_bonjour_name { my $self = shift; return $self->handle->db_uuid; } sub setup_template_roots { my $self = shift; my $view_class = ref( $self->app_handle ) . "::Server::View"; if ( Prophet::App->try_to_require($view_class) ) { $self->view_class($view_class); } else { $self->view_class( 'Prophet::Server::View' ); } Template::Declare->init( roots => [$self->view_class] ); } our $PROPHET_STATIC_ROOT; sub prophet_static_root { my $self = shift; unless ($PROPHET_STATIC_ROOT) { $PROPHET_STATIC_ROOT = File::Spec->catdir( Prophet::Util->updir( $INC{'Prophet.pm'}, 2 ), "share", "web", "static" ); eval { require File::ShareDir; 1 } or die "requires File::ShareDir to determine default static root"; $PROPHET_STATIC_ROOT = Prophet::Util->catfile( File::ShareDir::dist_dir('Prophet'), 'web/static' ) if ( !-d $PROPHET_STATIC_ROOT ); $PROPHET_STATIC_ROOT = Cwd::abs_path($PROPHET_STATIC_ROOT); } return $PROPHET_STATIC_ROOT; } our $APP_STATIC_ROOT; sub app_static_root { my $self = shift; unless ($APP_STATIC_ROOT) { my $app_file = ref($self->app_handle) .".pm"; $app_file =~ s|::|/|g; $APP_STATIC_ROOT = File::Spec->catdir( Prophet::Util->updir( $INC{$app_file}, 3 ), "share", "web", "static" ); my $dist = ref($self->app_handle); $dist =~ s/::/-/g; eval { require File::ShareDir; 1 } or die "requires File::ShareDir to determine default static root"; $APP_STATIC_ROOT = Prophet::Util->catfile( File::ShareDir::dist_dir($dist), 'web', 'static' ) if ( !-d $APP_STATIC_ROOT ); $APP_STATIC_ROOT = Cwd::abs_path($APP_STATIC_ROOT); } return $APP_STATIC_ROOT; } # Use system-installed CSS and Javascript libraries if they exist, so distros # have the option to not ship our embedded copies. 
# # I'm not sure if RPM-based systems have a standard location for system # Javascript libraries, but this ought to work on Debian/Ubuntu. Patches # welcome. sub system_js_and_css { my $mapping = { 'yui/css/reset.css' => '/usr/share/javascript/yui/reset/reset.css', 'jquery/js/jquery-1.2.6.min.js', => '/usr/share/javascript/jquery/jquery.min.js', }; return $mapping; } sub css { return '/static/prophet/yui/css/reset.css', '/static/prophet/jquery/css/superfish.css', '/static/prophet/jquery/css/superfish-navbar.css', '/static/prophet/jquery/css/jquery.autocomplete.css', '/static/prophet/jquery/css/tablesorter/style.css', } sub js { return '/static/prophet/jquery/js/jquery-1.2.6.min.js', '/static/prophet/jquery/js/pretty.js', '/static/prophet/jquery/js/hoverIntent.js', '/static/prophet/jquery/js/jquery.bgiframe.min.js', '/static/prophet/jquery/js/jquery-autocomplete.js', '/static/prophet/jquery/js/superfish.js', '/static/prophet/jquery/js/supersubs.js', '/static/prophet/jquery/js/jquery.tablesorter.min.js' } sub handle_request { my ( $self, $cgi ) = validate_pos( @_, { isa => 'Prophet::Server' }, { isa => 'CGI' } ); $self->cgi($cgi); $self->log_request(); $self->page_nav( Prophet::Web::Menu->new( cgi => $self->cgi, server => $self) ); $self->result( Prophet::Web::Result->new() ); if ( $ENV{'PROPHET_DEVEL'} ) { require Module::Refresh; Module::Refresh->refresh(); } my $controller = Prophet::Server::Controller->new( cgi => $self->cgi, app_handle => $self->app_handle, result => $self->result ); $controller->handle_functions(); my $dispatcher_class = ref( $self->app_handle ) . "::Server::Dispatcher"; if ( !$self->app_handle->try_to_require($dispatcher_class) ) { $dispatcher_class = "Prophet::Server::Dispatcher"; } my $d = $dispatcher_class->new( server => $self ); my $path = Path::Dispatcher::Path->new( path => $cgi->path_info, metadata => { method => $cgi->request_method, }, ); $d->run( $path, $d ) || $self->_send_404; } sub log_request { my $self = shift; my $cgi = $self->cgi; $self->app_handle->log_debug( localtime()." [".$ENV{'REMOTE_ADDR'}."] ".$cgi->request_method . 
" ".$cgi->path_info); } sub update_record_prop { my $self = shift; my $type = shift; my $uuid = shift; my $prop = shift; my $record = $self->load_record( type => $type, uuid => $uuid ); return $self->_send_404 unless ($record); $record->set_props( props => { $prop => ( $self->cgi->param('value') || undef ) } ); return $self->_send_redirect( to => "/records/$type/$uuid/$prop" ); } sub update_record { my $self = shift; my $type = shift; my $uuid = shift; my $record = $self->load_record( type => $type, uuid => $uuid ); return $self->_send_404 unless ($record); my $ret = $record->set_props( props => { map { $_ => $self->cgi->param($_) } $self->cgi->param() } ); $self->_send_redirect( to => "/records/$type/$uuid.json" ); } sub create_record { my $self = shift; my $type = shift; my $record = $self->load_record( type => $type ); my $uuid = $record->create( props => { map { $_ => $self->cgi->param($_) } $self->cgi->param() } ); return $self->_send_redirect( to => "/records/$type/$uuid.json" ); } sub get_record_prop { my $self = shift; my $type = shift; my $uuid = shift; my $prop = shift; my $record = $self->load_record( type => $type, uuid => $uuid ); return $self->_send_404 unless ($record); if ( my $val = $record->prop($prop) ) { return $self->send_content( content_type => 'text/plain', content => $val ); } else { return $self->_send_404(); } } sub get_record { my $self = shift; my $type = shift; my $uuid = shift; my $record = $self->load_record( type => $type, uuid => $uuid ); return $self->_send_404 unless ($record); return $self->send_content( encode_as => 'json', content => $record->get_props ); } sub get_record_list { my $self = shift; my $type = shift; require Prophet::Collection; my $col = Prophet::Collection->new( handle => $self->handle, type => $type ); $col->matching( sub {1} ); warn "Query language not implemented yet."; return $self->send_content( encode_as => 'json', content => { map { $_->uuid => "/records/$type/" . $_->uuid . 
".json" } @$col } ); } sub get_record_types { my $self = shift; $self->send_content( encode_as => 'json', content => $self->handle->list_types ); } sub serve_replica { my $self = shift; my $repo_file = shift; return undef unless $self->handle->can('read_file'); my $content = $self->handle->read_file($repo_file); return unless defined $content && length($content); $self->send_replica_content($content); } sub send_replica_content { my $self = shift; my $content = shift; return $self->send_content( content_type => 'application/x-prophet', content => $content ); } sub show_template { my $self = shift; my $p = shift; my $content = $self->render_template($p,@_); if ($content) { return $self->send_content( content_type => 'text/html; charset=UTF-8', content => $content,);} return undef; } sub render_template { my $self = shift; my $p = shift; if ( Template::Declare->has_template($p) ) { $self->view_class->app_handle( $self->app_handle ); $self->view_class->cgi( $self->cgi ); $self->view_class->page_nav( $self->page_nav ); $self->view_class->server($self); my $content = Template::Declare->show($p,@_); return $content; } return undef; } sub load_record { my $self = shift; my %args = validate( @_, { type => 1, uuid => 0 } ); require Prophet::Record; my $record = Prophet::Record->new( handle => $self->handle, type => $args{type} ); if ( $args{'uuid'} ) { return undef unless ( $self->handle->record_exists( type => $args{'type'}, uuid => $args{'uuid'} ) ); $record->load( uuid => $args{uuid} ); } return $record; } sub send_static_file { my $self = shift; my $filename = shift; my $type = 'text/html'; if ( $filename =~ /.js$/ ) { $type = 'text/javascript'; } elsif ( $filename =~ /.css$/ ) { $type = 'text/css'; } elsif ( $filename =~ /.png$/ ) { $type = 'image/png'; } my $system_library_mapping = $self->system_js_and_css(); my $content; if ( $system_library_mapping->{ $filename } && -f $system_library_mapping->{ $filename } ) { $content = Prophet::Util->slurp( $system_library_mapping->{ $filename } ); } else { for my $root ( $self->app_static_root, $self->prophet_static_root) { next unless -f Prophet::Util->catfile( $root => $filename ); my $qualified_file = Cwd::fast_abs_path( File::Spec->catfile( $root => $filename ) ); next if substr( $qualified_file, 0, length($root) ) ne $root; $content = Prophet::Util->slurp($qualified_file); } } if ( defined $content ) { return $self->send_content( static => 1, content => $content, content_type => $type ); } else { return $self->_send_404; } } sub send_content { my $self = shift; my %args = validate( @_, { content => 1, content_type => 0, encode_as => 0, static => 0 } ); if ( $args{'encode_as'} && $args{'encode_as'} eq 'json' ) { $args{'content_type'} = 'text/x-json'; $args{'content'} = to_json( $args{'content'} ); } print "HTTP/1.0 200 OK\r\n"; if ($args{static}) { print 'Cache-Control: max-age=31536000, public' ; print 'Expires: '.HTTP::Date::time2str( time() + 31536000 ) ; } print "Content-Type: " . $args{'content_type'} . "\r\n"; print "Content-Length: " . length( $args{'content'} ||'' ) . "\r\n\r\n"; print $args{'content'} || ''; return '200'; } sub _send_401 { my $self = shift; print "HTTP/1.0 401 READONLY_SERVER\r\n"; # TODO give an actual error page? return '401'; } sub _send_404 { my $self = shift; print "HTTP/1.0 404 ENOFILE\r\n"; return '404'; } sub _send_redirect { my $self = shift; my %args = validate( @_, { to => 1 } ); print "HTTP/1.0 302 Go over there\r\n"; print "Location: " . $args{'to'} . 
"\r\n"; return '302'; } =head2 make_link_relative PATH This method does its best to convert a URI path from absolute ( starts at / ) to relative. (Starts at .). =cut sub make_link_relative { my $self = shift; my $link = shift; return URI::file->new($link)->rel("file://".$self->cgi->path_info()); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Server/000077500000000000000000000000001160607302300166415ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Server/Controller.pm000066400000000000000000000177201160607302300213310ustar00rootroot00000000000000package Prophet::Server::Controller; use Any::Moose; use Prophet::Util; use Prophet::Web::Result; has cgi => ( is => 'rw', isa => 'CGI' ); has failure_message => ( is => 'rw', isa => 'Str' ); has functions => ( is => 'rw', isa => 'HashRef' ); has app_handle => ( is => 'rw', isa => 'Prophet::App' ); has result => ( is => 'ro', isa => 'Prophet::Web::Result' ); =head1 NAME =head1 METHODS =head1 DESCRIPTION =cut =head1 METHODS =cut sub extract_functions_from_cgi { my $self = shift; my $functions = {}; foreach my $param ( $self->cgi->all_parameters ) { next unless $param =~ /^prophet-function-(.*)$/; my $name = $1; $self->app_handle->log_fatal( "Duplicate function definition for @{[$name]}." ) if ( exists $functions->{$name} ); my $function_data = $self->cgi->param($param); my $attr = $self->string_to_hash($function_data); $attr->{name} = $name; # For now, always execute $attr->{execute} = 1; # We MUST validate any function we're going to canonicalize $attr->{validate} = 1 if $attr->{execute}; # We MUST canonicalize any function we're going to validate $attr->{canonicalize} = 1 if $attr->{validate}; $functions->{$name} = $attr; $functions->{$name}->{params} = $self->params_for_function_from_cgi($name); } $self->functions($functions); } sub params_for_function_from_cgi { my $self = shift; my $function = shift; my $values; for my $field ( $self->cgi->all_parameters ) { if ( $field =~ /^prophet-field-function-$function-prop-(.*)$/ ) { my $name = $1; $values->{$name} = { prop => $name, value => ($self->cgi->param($field) || undef), original_value => ($self->cgi->param( "original-value-" . 
$field ) || undef) }; } elsif ( $field =~ /^prophet-fill-function-$function-prop-(.*)$/ ) { my $name = $1; my $meta = {}; my $value = $self->cgi->param($field); next unless ( $value =~ /^function-(.*)\|result-(.*)$/ ); $values->{$name} = { prop => $name, from_function => $1, from_result => $2 }; } else { next; } } return $values; } sub handle_functions { my $self = shift; my @workflow = qw( extract_functions_from_cgi canonicalize_functions validate_functions execute_functions ); eval { for (@workflow) { $self->$_() ; } }; if (my $err = $@) { $self->result->success(0); $self->result->message($err); } } sub canonicalize_functions { my $self = shift; my $functions = $self->functions; foreach my $function ( sort { $functions->{$a}->{order} <=> $functions->{$b}->{order}} keys %{$functions}) { next unless ($functions->{$function}->{canonicalize}); foreach my $param ( keys %{ $functions->{$function}->{params} } ) { if ( defined $functions->{$function}->{params}->{$param}->{original_value} && ($functions->{$function}->{params}->{$param}->{original_value} eq $functions->{$function}->{params}->{$param}->{value} )) { delete $functions->{$function}->{params}->{$param}; next; } } } } sub validate_functions { my $self = shift; my $functions = $self->functions; foreach my $function ( sort { $functions->{$a}->{order} <=> $functions->{$b}->{order}} keys %{$functions}) { next unless ($functions->{$function}->{validate}); foreach my $param ( keys %{ $functions->{$function}->{params} } ) { if (0) { } } } sub execute_functions { my $self = shift; my $functions = $self->functions; foreach my $function ( sort { $functions->{$a}->{order} <=> $functions->{$b}->{order}} keys %{$functions}) { $self->app_handle->log_debug("About to execute a function - ".$function); $self->_fill_params_from_previous_functions($function); next unless ($functions->{$function}->{execute}); if ($functions->{$function}->{action} eq 'update') { $self->_exec_function_update($functions->{$function}); } elsif ($functions->{$function}->{action} eq 'create') { $self->_exec_function_create($functions->{$function}); } else { die "I don't know how to handle a ".$functions->{$function}->{action}; } } } sub _fill_params_from_previous_functions { my $self = shift; my $function = shift; my $params = $self->functions->{$function}->{params}; foreach my $param ( keys %$params ) { if ( my $from_function = $params->{$param}->{from_function} ) { my $from_result = $params->{$param}->{from_result}; my $function_result = $self->result->get($from_function); # XXX TODO - $from_result should be locked down tighter if ( $function_result->can($from_result) ) { $params->{$param}->{value} = $function_result->$from_result(); } } } } sub _get_record_for_function { my $self = shift; my $function = shift; my $functions = $self->functions; if ($functions->{$function}->{action} eq 'update') { return Prophet::Util->instantiate_record( uuid => $functions->{$function}->{uuid}, class=>$functions->{$function}->{class}, app_handle=> $self->app_handle); } elsif ($functions->{$function}->{action} eq 'create') { die $functions->{$function}->{class} ." 
is not a valid class " unless (UNIVERSAL::isa($functions->{$function}->{class}, 'Prophet::Record')); return $functions->{$function}->{class}->new( app_handle => $self->app_handle); } else { die "I don't know how to handle a ".$functions->{$function}->{action}; } } } sub _exec_function_create { my $self = shift; my $function = shift; my $object = $self->_get_record_for_function($function->{name}); my ( $val, $msg ) = $object->create( props => { map { $function->{params}->{$_}->{prop} => $function->{params}->{$_}->{value} } keys %{ $function->{params} } } ); my $res = Prophet::Web::FunctionResult->new( function_name => $function->{name}, class => $function->{class}, success => $object->uuid? 1 :0, record_uuid => $object->uuid, msg => ($msg || 'Record created')); $self->result->set($function->{name} => $res); } sub _exec_function_update { my $self = shift; my $function = shift; my $object = $self->_get_record_for_function($function->{name}); my ( $val, $msg ) = $object->set_props( props => { map { $function->{params}->{$_}->{prop} => $function->{params}->{$_}->{value} } keys %{ $function->{params} } } ); my $res = Prophet::Web::FunctionResult->new( function_name => $function->{name}, class => $function->{class}, success => $val? 1 :0, record_uuid => $object->uuid, msg => ($msg || 'Record updated')); $self->result->set($function->{name} => $res); } sub string_to_hash { my $self = shift; my $data = shift; my @bits = grep {$_} split( /\|/, $data ); my %attr = map { split( /=/, $_ ) } @bits; return \%attr; } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Server/Dispatcher.pm000066400000000000000000000064451160607302300212760ustar00rootroot00000000000000package Prophet::Server::Dispatcher; use Any::Moose; use Path::Dispatcher::Declarative -base, -default => { token_delimiter => '/', }; has server => ( isa => 'Prophet::Server', is => 'rw', weak_ref => 1 ); under { method => 'POST' } => sub { on qr'.*' => sub { my $self = shift; return $self->server->_send_401 if ( $self->server->read_only ); next_rule; }; under qr'/records' => sub { on qr|^/(.*)/(.*)/(.*)$| => sub { shift->server->update_record_prop($1,$2,$3) }; on qr|^/(.*)/(.*).json$| => sub { shift->server->update_record($1,$2) }; on qr|^/(.*).json$| => sub { shift->server->create_record($1) }; }; }; under { method => 'GET' } => sub { on qr'^/=/prophet/autocomplete' => sub { shift->server->show_template('/_prophet_autocompleter') }; on qr'^/static/prophet/(.*)$' => sub { shift->server->send_static_file($1)}; on qr'^/records.json' => sub { shift->server->get_record_types }; under qr'/records' => sub { on qr|^/(.*)/(.*)/(.*)$| => sub { shift->server->get_record_prop($1,$2,$3); }; on qr|^/(.*)/(.*).json$| => sub { shift->server->get_record($1,$2) }; on qr|^/(.*).json$| => sub { shift->server->get_record_list($1) }; }; on qr'^/replica(/resolutions)?' => sub { my $self = shift; if ($1 && $1 eq '/resolutions') { $_->metadata->{replica_handle} = $self->server->app_handle->handle->resolution_db_handle; } else { $_->metadata->{replica_handle} = $self->server->app_handle->handle; } next_rule; }; under qr'^/replica(/resolutions/)?' 
=> sub { on 'replica-version' => sub { shift->server->send_replica_content('1')}; on 'replica-uuid' => sub { my $self = shift; $self->server->send_replica_content( $_->metadata->{replica_handle}->uuid ); }; on 'database-uuid' => sub { my $self = shift; $self->server->send_replica_content( $_->metadata->{replica_handle}->db_uuid ); }; on 'latest-sequence-no' => sub { my $self = shift; $self->server->send_replica_content( $_->metadata->{replica_handle}->latest_sequence_no ); }; on 'changesets.idx' => sub { my $self = shift; my $index = ''; my $repl = $_->metadata->{replica_handle}; $repl->traverse_changesets( after=> 0, load_changesets => 0, callback => sub { my %args = (@_); my $data = $args{changeset_metadata}; my $changeset_index_line = pack( 'Na16NH40', $data->[0], $repl->uuid_generator->from_string( $data->[1]), $data->[2], $data->[3]); $index .= $changeset_index_line; } ); $self->server->send_replica_content($index); }; on qr|cas/changesets/././(.{40})$| => sub { my $self = shift; my $sha1 = $1; $self->server->send_replica_content($_->metadata->{replica_handle}->fetch_serialized_changeset(sha1 => $sha1)); } ; }; }; on qr'^(.*)$' => sub { shift->server->show_template($1) || next_rule; }; __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Server/View.pm000066400000000000000000000174521160607302300201220ustar00rootroot00000000000000use strict; use warnings; package Prophet::Server::View; use base 'Template::Declare'; use Template::Declare::Tags; use URI::file; # Prophet::Server::ViewHelpers overwrites the form {} function provided by # Template::Declare::Tags. ViewHelpers uses Exporter::Lite which does not "use # warnings". When prove -w or make test is run, $^W is set which turns on # warnings in Exporter::Lite (most importantly, redefinition warnings). We # don't want to warn about this specific redefinition, so we swap out # $SIG{__WARN__} to shut up about it. BEGIN { no warnings 'redefine'; my $old_warn = $SIG{__WARN__} || sub { warn $_[0] }; local $SIG{__WARN__} = sub { my $warning = shift; $old_warn->($warning) unless $warning =~ /Subroutine Prophet::Server::View::form redefined /; }; require Prophet::Server::ViewHelpers; Prophet::Server::ViewHelpers->import; } use Params::Validate; use Prophet::Web::Menu; our $APP_HANDLE; sub app_handle { my $self = shift; $APP_HANDLE = shift if (@_); return $APP_HANDLE; } our $CGI; sub cgi { my $self = shift; $CGI = shift if (@_); return $CGI; } our $MENU; sub page_nav { my $self = shift; $MENU = shift if (@_); return $MENU; } our $SERVER; sub server { my $self = shift; $SERVER = shift if (@_); return $SERVER; } template '_prophet_autocompleter' => sub { my $self = shift; my %args; for (qw(q function record type class prop)) { $args{$_} = $self->cgi->param($_); } my $obj = Prophet::Util->instantiate_record( class => $self->cgi->param('class'), uuid => $self->cgi->param('uuid'), app_handle => $self->app_handle ); my @possible; if ( $obj) { my $canon = { $args{prop} => $args{q} }; $obj->canonicalize_prop( $args{'prop'}, $canon, {} ); if ( $canon->{ $args{prop} } ne $args{q} ) { push @possible, $canon->{ $args{'prop'} }; } } if ( $obj->loaded ) { push @possible, $obj->prop( $args{'prop'} ); } else { my $params = { $args{'prop'} => undef }; $obj->default_props($params); push @possible, $params->{ $args{'prop'} }; # XXX fill in defaults; } push @possible, $obj->recommended_values_for_prop( $args{'prop'} ); my %seen; for ( grep { defined && !$seen{$_}++ } @possible ) { outs( $_ . "\n" ); #." 
| ".$_."\n"); } }; sub default_page_title { 'Prophet' } template head => sub { my $self = shift; my @args = shift; head { meta { attr { content => "text/html; charset=utf-8", 'http-equiv' => "Content-Type" }}; title { shift @args }; for ( $self->server->css ) { link { { rel is 'stylesheet', href is link_to($_), type is "text/css", media is 'screen'} }; } for ( $self->server->js ) { script { { src is link_to($_), type is "text/javascript" } }; } } }; template footer => sub { }; template header => sub { my $self = shift; my $title = shift; if ($self->page_nav) { div { { class is 'page-nav'}; outs_raw($self->page_nav->render_as_menubar); }; } h1 { $title }; }; template '/' => page { h1 { "This is a Prophet replica!" } }; sub record_table { my %args = validate(@_, { records => 1, url_prefix => { default => '' }, }); my $records = $args{records}; my $prefix = $args{url_prefix}; table { my @items = $records ? $records->items : (); if (@items) { my @headers = $items[0]->_parse_format_summary; row { for (@headers) { th { $_->{prop} } } } } for my $record (sort { $a->luid <=> $b->luid } @items) { my $type = $record->type; my $uuid = $record->uuid; my @atoms = $record->format_summary; row { attr { id => "$type-$uuid", class => "$type" }; for my $i (0 .. $#atoms) { my $atom = $atoms[$i]; my $prop = $atom->{prop}; cell { attr { class => "prop-$prop", }; if ($i == 0) { a { attr { href => link_to("$prefix$uuid.html"), }; outs $atom->{value}; } } else { outs $atom->{value}; } } } } } } } template record_table => page { my $self = shift; my $records = shift; record_table(records => $records); }; template record => page { my $self = shift; my $record = shift; p { a { attr { href => link_to("index.html"), }; outs "index"; } } hr {} dl { dt { 'UUID' } dd { $record->uuid } dt { 'LUID' } dd { $record->luid }; my $props = $record->get_props; for my $prop (sort keys %$props) { dt { $prop } dd { $props->{$prop} } } }; hr {} h3 { "History" }; show record_changesets => $record; # linked collections for my $method ($record->collection_reference_methods) { my $collection = $record->$method; next if $collection->count == 0; my $type = $collection->record_class->type; hr {} h3 { "Linked $type records" } record_table( records => $collection, url_prefix => "../$type/", ); } }; private template record_changesets => sub { my $self = shift; my $record = shift; my $uuid = $record->uuid; ol { for my $change ($record->changes) { my @prop_changes = $change->prop_changes; next if @prop_changes == 0; if (@prop_changes == 1) { li { $prop_changes[0]->summary }; next; } li { ul { for my $prop_change (@prop_changes) { li { outs $prop_change->summary; } } } } } } }; sub generate_changeset_feed { my $self = shift; my %args = validate(@_, { handle => 1, title => 0, }); my $handle = $args{handle}; my $title = $args{title} || 'Prophet replica ' . $handle->uuid; require XML::Atom::SimpleFeed; my $feed = XML::Atom::SimpleFeed->new( id => "urn:uuid:" . $handle->uuid, title => $title, author => $self->app_handle->current_user_email, ); my $newest = $handle->latest_sequence_no; my $start = $newest - 20; $start = 0 if $start < 0; $handle->traverse_changesets( after => $start, callback => sub { my %args = (@_); $feed->add_entry( title => 'Changeset ' . 
$args{changeset}->sequence_no, # need uuid or absolute link :( category => 'Changeset', ); }, ); return $feed; } sub link_to ($) { my $link = shift; return URI::file->new($link)->rel("file://".$ENV{REQUEST_URI}); } 1; prophet-0.750/lib/Prophet/Server/ViewHelpers.pm000066400000000000000000000035121160607302300214350ustar00rootroot00000000000000package Prophet::Server::ViewHelpers; use warnings; use strict; use base 'Exporter::Lite'; use Params::Validate qw/validate/; use Template::Declare::Tags; use Prophet::Web::Field; our @EXPORT = (qw(form page content widget function param_from_function hidden_param)); use Prophet::Server::ViewHelpers::Widget; use Prophet::Server::ViewHelpers::Function; use Prophet::Server::ViewHelpers::ParamFromFunction; use Prophet::Server::ViewHelpers::HiddenParam; sub page (&;$) { unshift @_, undef if $#_ == 0; my ( $meta, $code ) = @_; sub { my $self = shift; my @args = @_; my $title = $self->default_page_title; $title = $meta->( $self, @args ) if $meta; html { attr { xmlns => 'http://www.w3.org/1999/xhtml' }; show( 'head' => $title ); body { div { class is 'page'; show('header', $title); div { class is 'body'; $code->( $self, @args ); } } }; show('footer'); } } } sub content (&) { my $sub_ref = shift; return $sub_ref; } sub function { my $f = Prophet::Server::ViewHelpers::Function->new(@_); $f->render; return $f; } sub param_from_function { my $w = Prophet::Server::ViewHelpers::ParamFromFunction->new(@_); $w->render; return $w; } sub hidden_param { my $w = Prophet::Server::ViewHelpers::HiddenParam->new(@_); $w->render; return $w; } sub widget { my $w = Prophet::Server::ViewHelpers::Widget->new(@_); $w->render; return $w; } BEGIN { no warnings 'redefine'; *old_form = \&form; *form = sub (&;$){ my $code = shift; old_form ( sub { attr { method => 'post'}; $code->(@_); } ) }}; 1; prophet-0.750/lib/Prophet/Server/ViewHelpers/000077500000000000000000000000001160607302300210765ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Server/ViewHelpers/Function.pm000066400000000000000000000031301160607302300232160ustar00rootroot00000000000000package Prophet::Server::ViewHelpers::Function; use Template::Declare::Tags; BEGIN { delete ${__PACKAGE__."::"}{meta}; delete ${__PACKAGE__."::"}{with}; } =head1 NAME =head1 METHODS =head1 DESCRIPTION =cut =head1 METHODS =cut use Any::Moose; use Any::Moose 'Util::TypeConstraints'; has record => ( isa => 'Prophet::Record', is => 'ro' ); has action => ( isa => ( enum [qw(create update delete)] ), is => 'ro' ); has order => ( isa => 'Int', is => 'ro' ); has validate => ( isa => 'Bool', is => 'rw', default => 1); has canonicalize => ( isa => 'Bool', is => 'rw', default => 1); has execute => ( isa => 'Bool', is => 'rw', default => 1); has name => ( isa => 'Str', is => 'rw', #regex => qr/^(?:|[\w\d]+)$/, ); sub new { my $self = shift->SUPER::new(@_); $self->name ( ($self->record->loaded ? $self->record->uuid : 'new') . "-" . $self->action ) unless ($self->name); return $self; } sub render { my $self = shift; my %bits = ( order => $self->order, action => $self->action, type => $self->record->type, class => ref($self->record), uuid => $self->record->uuid, validate => $self->validate, canonicalize => $self->canonicalize, execute => $self->execute ); my $string = "|" . join( "|", map { $bits{$_} ? $_ . "=" . $bits{$_} : '' } keys %bits ) . 
"|"; outs_raw(qq{}); } __PACKAGE__->meta->make_immutable(inline_constructor => 0); no Any::Moose; 1; prophet-0.750/lib/Prophet/Server/ViewHelpers/HiddenParam.pm000066400000000000000000000015101160607302300236050ustar00rootroot00000000000000package Prophet::Server::ViewHelpers::HiddenParam; use Template::Declare::Tags; BEGIN { delete ${__PACKAGE__."::"}{meta}; delete ${__PACKAGE__."::"}{with}; } use Any::Moose; extends 'Prophet::Server::ViewHelpers::Widget'; use Any::Moose 'Util::TypeConstraints'; has value => ( isa => 'Str', is => 'rw'); sub render { my $self = shift; my $unique_name = $self->_generate_name(); my $record = $self->function->record; $self->field( Prophet::Web::Field->new( name => $unique_name, id => $unique_name, record => $record, class => 'hidden-prop-'.$self->prop.' function-'.$self->function->name, value => $self->value, type => 'hidden') ); outs_raw( $self->field->render_input ); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Server/ViewHelpers/ParamFromFunction.pm000066400000000000000000000026151160607302300250320ustar00rootroot00000000000000package Prophet::Server::ViewHelpers::ParamFromFunction; use Template::Declare::Tags; BEGIN { delete ${__PACKAGE__."::"}{meta}; delete ${__PACKAGE__."::"}{with}; } use Any::Moose; use Any::Moose 'Util::TypeConstraints'; =head1 NAME =head1 METHODS =head1 DESCRIPTION =cut has function => ( isa => 'Prophet::Server::ViewHelpers::Function', is => 'ro' ); has name => ( isa => 'Str', is => 'rw' ); has prop => ( isa => 'Str', is => 'ro' ); has from_function => ( isa => 'Prophet::Server::ViewHelpers::Function', is => 'rw' ); has from_result => ( isa => 'Str', is => 'rw' ); has field => ( isa => 'Prophet::Web::Field', is => 'rw' ); sub render { my $self = shift; my $unique_name = $self->_generate_name(); my $record = $self->function->record; my $value = "function-".$self->from_function->name."|result-".$self->from_result; $self->field( Prophet::Web::Field->new( name => $unique_name, type => 'hidden', record => $record, value => $value )); outs_raw( $self->field->render_input ); } sub _generate_name { my $self = shift; return "prophet-fill-function-" . $self->function->name . "-prop-" . $self->prop; } =head1 METHODS =cut __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Server/ViewHelpers/Widget.pm000066400000000000000000000052731160607302300226660ustar00rootroot00000000000000package Prophet::Server::ViewHelpers::Widget; use Template::Declare::Tags; BEGIN { delete ${__PACKAGE__."::"}{meta}; delete ${__PACKAGE__."::"}{with}; } use Any::Moose; =head1 NAME =head1 METHODS =head1 DESCRIPTION =cut has function => ( isa => 'Prophet::Server::ViewHelpers::Function', is => 'ro' ); has name => ( isa => 'Str', is => 'rw' ); has prop => ( isa => 'Str', is => 'ro' ); has field => ( isa => 'Prophet::Web::Field', is => 'rw'); has type => ( isa => 'Str|Undef', is => 'rw'); has autocomplete => (isa => 'Bool', is => 'rw', default => 1); has default => ( isa => 'Str|Undef', is => 'rw'); sub render { my $self = shift; my $unique_name = $self->_generate_name(); my $record = $self->function->record; my $value; if (defined $self->default) { $value = $self->default; } elsif ( $self->function->action eq 'create' ) { if ( my $method = $self->function->record->can( 'default_prop_' . 
$self->prop ) ) { $value = $method->( $self->function->record ); } else { $value = ''; } } elsif ( $self->function->action eq 'update' && $self->function->record->loaded ) { $value = $self->function->record->prop( $self->prop ) || ''; } else { $value = ''; } $self->field( Prophet::Web::Field->new( name => $unique_name, id => $unique_name, record => $record, label => $self->prop, class => 'prop-'.$self->prop.' function-'.$self->function->name, value => $value, ($self->type ? ( type => $self->type) : ()) )); my $orig = Prophet::Web::Field->new( name => "original-value-". $unique_name, value => $value, type => 'hidden' ); outs_raw( $self->field->render ); outs_raw( $orig->render_input ); if ($self->autocomplete) { $self->_render_autocompleter(); } } sub _render_autocompleter { my $self = shift; my $record = $self->function->record(); outs_raw(' '); } sub _generate_name { my $self = shift; return "prophet-field-function-" . $self->function->name . "-prop-" . $self->prop; } =head1 METHODS =cut __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Test.pm000066400000000000000000000226111160607302300166520ustar00rootroot00000000000000package Prophet::Test; use strict; use warnings; use base qw/Test::More Exporter/; our @EXPORT = qw/as_alice as_bob as_charlie as_david as_user repo_uri_for replica_last_rev replica_uuid_for ok_added_revisions replica_uuid database_uuid database_uuid_for serialize_conflict serialize_changeset in_gladiator diag run_command set_editor set_editor_script load_record last_script_stdout last_script_stderr last_script_exit_code /; use Cwd qw/getcwd/; use File::Path 'rmtree'; use File::Spec; use File::Temp qw/tempdir tempfile/; use Params::Validate ':all'; use Prophet::Util; use Prophet::CLI; our $REPO_BASE = File::Temp::tempdir(); Test::More->import; diag( "Replicas can be found in $REPO_BASE" ); # by default, load no configuration file $ENV{PROPHET_APP_CONFIG} = ''; { no warnings 'redefine'; require Test::More; sub Test::More::diag { # bad bad bad # convenient convenient convenient Test::More->builder->diag(@_) if ( $Test::Harness::Verbose || $ENV{'TEST_VERBOSE'} ); } } our $EDIT_TEXT = sub { shift }; do { no warnings 'redefine'; *Prophet::CLI::Command::edit_text = sub { my $self = shift; $EDIT_TEXT->(@_); }; }; =head2 set_editor($code) Sets the subroutine that Prophet should use instead of C (as this routine invokes an interactive editor) to $code. =cut sub set_editor { $EDIT_TEXT = shift; } =head2 set_editor_script SCRIPT Sets the editor that Proc::InvokeEditor uses. This should be a non-interactive script found in F. =cut sub set_editor_script { my ($self, $script) = @_; delete $ENV{'VISUAL'}; # Proc::InvokeEditor checks this first $ENV{'EDITOR'} = "$^X " . Prophet::Util->catfile(getcwd(), 't', 'scripts', $script); Test::More::diag "export EDITOR=" . $ENV{'EDITOR'} . "\n"; } =head2 import_extra($class, $args) =cut sub import_extra { my $class = shift; my $args = shift; Test::More->export_to_level(2); # Now, clobber Test::Builder::plan (if we got given a plan) so we # don't try to spit one out *again* later if ( $class->builder->has_plan ) { no warnings 'redefine'; *Test::Builder::plan = sub { }; } delete $ENV{'PROPHET_APP_CONFIG'}; $ENV{'PROPHET_EMAIL'} = 'nobody@example.com'; } =head2 in_gladiator($code) Run the given code using L. 
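For example, to watch for objects leaked while running a command (a minimal sketch; C<Devel::Gladiator> is loaded lazily, and when it is not installed the block is simply run without the arena walk):

    in_gladiator {
        run_command( qw(search --type Bug --regex .) );
    };
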
=cut sub in_gladiator (&) { my $code = shift; local $Test::Builder::Level = $Test::Builder::Level + 1; my $types; eval { require Devel::Gladiator; }; if ($@) { warn 'Devel::Gladiator not found'; return $code->(); } for ( @{ Devel::Gladiator::walk_arena() } ) { $types->{ ref($_) }--; } $code->(); for ( @{ Devel::Gladiator::walk_arena() } ) { $types->{ ref($_) }++; } map { $types->{$_} || delete $types->{$_} } keys %$types; } =head2 repo_path_for($username) Returns a path on disk for where $username's replica is stored. =cut sub repo_path_for { my $username = shift; return File::Spec->catdir($REPO_BASE => $username); } sub config_file_for { my $username = shift; return File::Spec->catdir($REPO_BASE, $username, 'config'); } =head2 repo_uri_for($username) Returns a file:// URI for $USERNAME'S replica (with the correct replica type prefix). =cut sub repo_uri_for { my $username = shift; my $path = repo_path_for($username); return 'file://' . $path; } =head2 replica_uuid Returns the UUID of the test replica. =cut sub replica_uuid { my $self = shift; my $cli = Prophet::CLI->new(); return $cli->handle->uuid; } =head2 database_uuid Returns the UUID of the test database. =cut sub database_uuid { my $self = shift; my $cli = Prophet::CLI->new(); return eval { $cli->handle->db_uuid}; } =head2 replica_last_rev Returns the sequence number of the last change in the test replica. =cut sub replica_last_rev { my $cli = Prophet::CLI->new(); return $cli->handle->latest_sequence_no; } =head2 as_user($username, $coderef) Run this code block as $username. This routine sets up the %ENV hash so that when we go looking for a repository, we get the user's repo. =cut our %REPLICA_UUIDS; our %DATABASE_UUIDS; sub as_user { my $username = shift; my $coderef = shift; local $ENV{'PROPHET_REPO'} = repo_path_for($username); local $ENV{'PROPHET_EMAIL'} = $username . '@example.com'; local $ENV{'PROPHET_APP_CONFIG'} = config_file_for($username); my $ret = $coderef->(); $REPLICA_UUIDS{$username} = replica_uuid(); $DATABASE_UUIDS{$username} = database_uuid(); return $ret; } =head2 replica_uuid_for($username) Returns the UUID of the given user's test replica. =cut sub replica_uuid_for { my $user = shift; return $REPLICA_UUIDS{$user}; } =head2 database_uuid_for($username) Returns the UUID of the given user's test database. =cut sub database_uuid_for { my $user = shift; return $DATABASE_UUIDS{$user}; } =head2 ok_added_revisions( { CODE }, $numbers_of_new_revisions, $msg) Checks that the given code block adds the given number of changes to the test replica. $msg is optional and will be printed with the test if given. =cut sub ok_added_revisions (&$$) { my ( $code, $num, $msg ) = @_; local $Test::Builder::Level = $Test::Builder::Level + 1; my $last_rev = replica_last_rev(); $code->(); is( replica_last_rev(), $last_rev + $num, $msg ); } =head2 serialize_conflict($conflict_obj) Returns a simple, serialized version of a L object suitable for comparison in tests. 
The serialized version is a hash reference containing the following keys: meta => { original_source_uuid => 'source_replica_uuid' } records => { 'record_uuid' => { change_type => 'type', props => { propchange_name => { source_old => 'old_val', source_new => 'new_val', target_old => 'target_val', } } }, 'another_record_uuid' => { change_type => 'type', props => { propchange_name => { source_old => 'old_val', source_new => 'new_val', target_old => 'target_val', } } }, } =cut sub serialize_conflict { my ($conflict_obj) = validate_pos( @_, { isa => 'Prophet::Conflict' } ); my $conflicts; for my $change ( @{ $conflict_obj->conflicting_changes } ) { $conflicts->{meta} = { original_source_uuid => $conflict_obj->changeset->original_source_uuid }; $conflicts->{records}->{ $change->record_uuid } = { change_type => $change->change_type, }; for my $propchange ( @{ $change->prop_conflicts } ) { $conflicts->{records}->{ $change->record_uuid }->{props}->{ $propchange->name } = { source_old => $propchange->source_old_value, source_new => $propchange->source_new_value, target_old => $propchange->target_value } } } return $conflicts; } =head2 serialize_changeset($changeset_obj) Returns a simple, serialized version of a L object suitable for comparison in tests (a hash). =cut sub serialize_changeset { my ($cs) = validate_pos( @_, { isa => 'Prophet::ChangeSet' } ); return $cs->as_hash; } =head2 run_command($command, @args) Run the given command with (optionally) the given args using a new L object. Returns the standard output of that command in scalar form or, in array context, the STDOUT in scalar form *and* the STDERR in scalar form. Examples: run_command('create', '--type=Foo'); =cut our $CLI_CLASS = 'Prophet::CLI'; sub run_command { my $output = ''; my $error = ''; my $original_stdout = *STDOUT; my $original_stderr = *STDERR; open( my $out_handle, '>', \$output ); open( my $err_handle, '>', \$error ); *STDOUT = $out_handle; *STDERR = $err_handle; $|++; # autoflush my $ret = eval { local $SIG{__DIE__} = 'DEFAULT'; $CLI_CLASS->new->run_one_command(@_); }; warn $@ if $@; # restore to originals *STDOUT = $original_stdout; *STDERR = $original_stderr; return wantarray ? ($output, $error) : $output; } { =head2 load_record($type, $uuid) Loads and returns a record object for the record with the given type and uuid. =cut my $connection; sub load_record { my $type = shift; my $uuid = shift; require Prophet::Record; $connection ||= Prophet::CLI->new->handle; my $record = Prophet::Record->new(handle => $connection, type => $type); $record->load(uuid => $uuid); return $record; } } =head2 as_alice CODE, as_bob CODE, as_charlie CODE, as_david CODE Runs CODE as alice, bob, charlie or david. 
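For example (a minimal sketch; the record type and property here are only illustrative):

    as_alice {
        run_command( 'init' );
        run_command( qw(create --type Bug --), '--status' => 'new' );
    };
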
=cut sub as_alice (&) { as_user( alice => shift ) } sub as_bob (&) { as_user( bob => shift ) } sub as_charlie(&) { as_user( charlie => shift ) } sub as_david(&) { as_user( david => shift ) } # END { # for (qw(alice bob charlie david)) { # # as_user( $_, sub { rmtree [ $ENV{'PROPHET_REPO'} ] } ); # } # } 1; prophet-0.750/lib/Prophet/Test/000077500000000000000000000000001160607302300163125ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Test/Arena.pm000066400000000000000000000107541160607302300177050ustar00rootroot00000000000000package Prophet::Test::Arena; use Any::Moose; has chickens => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, auto_deref => 1, ); has record_callback => ( is => 'rw', isa => 'CodeRef', ); has history => ( is => 'rw', isa => 'ArrayRef', default => sub { [] }, ); sub add_history { my $self = shift; push @{ $self->history }, @_; } use Prophet::Test::Participant; use Prophet::Test; sub setup { my $self = shift; my $count = shift; my @names = ref $count ? @$count : ( map { "person" . $_ } (1..$count)); my @chickens = map { Prophet::Test::Participant->new( { name => $_, arena => $self } ) } @names; for my $c (@chickens) { as_user($c->name => sub { my $p = Prophet::CLI->new(); diag($c => $p->handle->display_name_for_replica); }); } $self->chickens(\@chickens); } sub run_from_yaml { my $self = shift; my @c = caller(0); no strict 'refs'; my $fh = *{ $c[0] . '::DATA' }; return $self->run_from_yamlfile(@ARGV) unless fileno($fh); local $/; eval { require YAML::Syck; } || Test::More::plan(skip_all => 'YAML::Syck required for these tests'); $self->run_from_data( YAML::Syck::Load(<$fh>) ); } sub run_from_yamlfile { my ( $self, $file ) = @_; eval { require YAML::Syck; } || Test::More::plan(skip_all => 'YAML::Syck required for these tests'); $self->run_from_data( YAML::Syck::LoadFile($file) ); } sub run_from_data { my ( $self, $data ) = @_; Test::More::plan( tests => scalar @{ $data->{recipe} } + scalar @{ $data->{chickens} } ); my $arena = Prophet::Test::Arena->new( { record_callback => sub { my ( $name, $action, $args ) = @_; return; }, } ); $arena->setup( $data->{chickens} ); my $record_map; for ( @{ $data->{recipe} } ) { my ( $name, $action, $args ) = @$_; my ($chicken) = grep { $_->name eq $name } $arena->chickens; if ( $args->{record} ) { $args->{record} = $record_map->{ $args->{record} }; } my $next_result = $args->{result}; as_user( $chicken->name, sub { @_ = ( $chicken, $action, $args ); goto $chicken->can('take_one_step'); } ); if ( $args->{result} ) { $record_map->{$next_result} = $args->{result}; } } # my $third = $arena->dump_state; # $arena->sync_all_pairs; # my $fourth = $arena->dump_state; # is_deeply($third,$fourth); } my $TB = Test::Builder->new(); sub step { my $self = shift; my $step_name = shift || undef; my $step_display = defined($step_name) ? $step_name : "(undef)"; for my $chicken ($self->chickens) { diag(" as ".$chicken->name. 
": $step_display"); # walk the arena, noting the type of each value as_user( $chicken->name, sub { $chicken->take_one_step($step_name) } ); die "We failed some tests; aborting" if grep { !$_ } $TB->summary; } # for x rounds, have each participant execute a random action } sub dump_state { my $self = shift; my %state; for my $chicken ($self->chickens) { $state{ $chicken->name } = as_user( $chicken->name, sub { $chicken->dump_state } ); } return \%state; } use List::Util qw/shuffle/; sub sync_all_pairs { my $self = shift; diag("now syncing all pairs"); my @chickens_a = shuffle $self->chickens; my @chickens_b = shuffle $self->chickens; for my $a (@chickens_a) { for my $b (@chickens_b) { next if $a->name eq $b->name; diag( $a->name, $b->name ); as_user( $a->name, sub { $a->sync_from_peer( { from => $b->name } ) } ); die if ( grep { !$_ } $TB->summary ); } } return 1; } sub record { my ( $self, $name, $action, $args ) = @_; my $stored = {%$args}; if ( my $record = $stored->{record} ) { $stored->{record} = $self->{record_map}{$record}; } elsif ( my $result = $stored->{result} ) { $stored->{result} = $self->{record_map}{$result} = ++$self->{record_cnt}; } return $self->record_callback->( $name, $action, $args ) if $self->record_callback; # XXX: move to some kind of recorder class and make use of callback $self->add_history([$name, $action, $stored]); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Test/Editor.pm000066400000000000000000000061041160607302300200770ustar00rootroot00000000000000package Prophet::Test::Editor; use strict; use warnings; use Prophet::Util; use Params::Validate; use File::Spec; =head1 METHODS =head2 edit( tmpl_files => $tmpl_files, edit_callback => sub {}, verify_callback => sub {} ) Expects @ARGV to contain at least an option and a file to be edited. It can also contain a replica uuid, a ticket uuid, and a status file. The last item must always be the file to be edited. The others, if they appear, must be in that order after the option. The status file must contain the string 'status' in its filename. edit_callback is called on each line of the file being edited. It should make any edits to the lines it receives and then print what it wants to be saved to the file. verify_callback is called after editing is done. If you need to write whether the template was correct to a status file, for example, this should be done here. =cut sub edit { my %args = @_; validate( @_, { edit_callback => 1, verify_callback => 1, tmpl_files => 1, } ); my $option = shift @ARGV; my $tmpl_file = $args{tmpl_files}->{$option}; my @valid_template = Prophet::Util->slurp("t/data/$tmpl_file"); chomp @valid_template; my $status_file = $ARGV[-2] =~ /status/ ? delete $ARGV[-2] : undef; # a bit of a hack to dermine whether the last arg is a filename my $replica_uuid = File::Spec->file_name_is_absolute($ARGV[0]) ? undef : shift @ARGV; my $ticket_uuid = File::Spec->file_name_is_absolute($ARGV[0]) ? undef : shift @ARGV; my @template = (); while (<>) { chomp( my $line = $_ ); push @template, $line; $args{edit_callback}( option => $option, template => \@template, valid_template => \@valid_template, replica_uuid => $replica_uuid, ticket_uuid => $ticket_uuid, ); } $args{verify_callback}( template => \@template, valid_template => \@valid_template, status_file => $status_file ); } =head2 check_template_by_line($template, $valid_template, $errors) $template is a reference to an array containing the template to check, split into lines. 
$valid_template is the same for the template to check against. Lines in these arrays should not have trailing newlines. $errors is a reference to an array where error messages will be stored. Lines in $valid_template should consist of either plain strings, or strings beginning with 'qr/' (to delimit a regexp object). Returns true if the templates match and false otherwise. =cut sub check_template_by_line { my @template = @{ shift @_ }; my @valid_template = @{ shift @_ }; my $replica_uuid = shift; my $ticket_uuid = shift; my $errors = shift; for my $valid_line (@valid_template) { my $line = shift @template; push @$errors, "got nothing, expected [$valid_line]" if !defined($line); push @$errors, "[$line] doesn't match [$valid_line]" if ($valid_line =~ /^qr\//) ? $line !~ eval($valid_line) : $line eq $valid_line; } return !(@$errors == 0); } 1; prophet-0.750/lib/Prophet/Test/Participant.pm000066400000000000000000000122131160607302300211250ustar00rootroot00000000000000package Prophet::Test::Participant; use Any::Moose; use Prophet::Test; use Test::Exception; has name => ( is => 'rw', isa => 'Str', ); has arena => ( is => 'rw', isa => 'Prophet::Test::Arena', weak_ref => 1, ); sub BUILD { my $self = shift; as_user( $self->name, sub { call_func( [qw(init)] ) } ); as_user( $self->name, sub { call_func_ok( [qw(search --type Bug --regex .)] ) } ); } use List::Util qw(shuffle); my @CHICKEN_DO = qw(create_record create_record delete_record update_record update_record update_record update_record update_record sync_from_peer sync_from_peer noop); sub take_one_step { my $self = shift; my $action = shift || ( shuffle(@CHICKEN_DO) )[0]; my $args = shift; @_ = ( $self, $args ); goto $self->can($action); } sub _random_props { my @prop_values = qw(A B C D E); my @prop_keys = qw(1 2 3 4 5); return ( map { "--" . $prop_keys[$_] => $prop_values[$_] } ( 0 .. 4 ) ); } sub _permute_props { my %props = (@_); @props{ keys %props } = shuffle( values %props ); for ( keys %props ) { if ( int( rand(10) < 2 ) ) { delete $props{$_}; } } if ( int( rand(10) < 3 ) ) { $props{int(rand(5))+1 } = chr(rand(5)+65); } return %props; } sub noop { my $self = shift; ok( 1, $self->name . ' - NOOP' ); } sub delete_record { my $self = shift; my $args = shift; $args->{record} ||= get_random_local_record(); return undef unless ( $args->{record} ); $self->record_action( 'delete_record', $args ); call_func_ok( [ qw(delete --type Scratch --uuid), $args->{record} ] ); } sub create_record { my $self = shift; my $args = shift; @{ $args->{props} } = _random_props() unless $args->{props}; my ( $ret, $out, $err ) = call_func_ok( [ qw(create --type Scratch --), @{ $args->{props} } ] ); # ok($ret, $self->name . " created a record"); if ( $out =~ /Created\s+(.*?)\s+(\d+)\s+\((.*)\)/i ) { $args->{result} = $3; } $self->record_action( 'create_record', $args ); } sub update_record { my $self = shift; my $args = shift; $args->{record} ||= get_random_local_record(); return undef unless ( $args->{'record'} ); my ( $ok, $stdout, $stderr ) = call_func( [ qw(show --type Scratch --uuid), $args->{record} ] ); my %props = map { split( /: /, $_, 2 ) } split( /\n/, $stdout ); delete $props{id}; %{ $args->{props} } = _permute_props(%props) unless $args->{props}; %props = %{ $args->{props} }; call_func_ok( [ qw(update --type Scratch --uuid), $args->{record}, '--', map { '--' . $_ => $props{$_} } keys %props ], $self->name . 
" updated a record" ); $self->record_action( 'update_record', $args ); } sub sync_from_peer { my $self = shift; my $args = shift; my $from = $args->{from} ||= ( shuffle( grep { $_->name ne $self->name } $self->arena->chickens ) )[0]->name; $self->record_action( 'sync_from_peer', $args ); @_ = ( [ 'merge', '--prefer', 'to', '--from', repo_uri_for($from), '--to', repo_uri_for( $self->name ), '--force' ], $self->name . " sync from " . $from . " ran ok!" ); goto \&call_func_ok; } sub get_random_local_record { my ( $ok, $stdout, $stderr ) = call_func( [qw(search --type Scratch --regex .)] ); my $update_record = ( shuffle( map { $_ =~ /'uuid': '(\S*?)'/ } split( /\n/, $stdout ) ) )[0]; return $update_record; } sub dump_state { my $self = shift; my $cli = Prophet::CLI->new(); my $state; my $records = Prophet::Collection->new( handle => $cli->handle, type => 'Scratch' ); my $merges = Prophet::Collection->new( handle => $cli->handle, type => $Prophet::Replica::MERGETICKET_METATYPE ); my $resolutions = Prophet::Collection->new( handle => $cli->app_handle->handle->resolution_db_handle, type => '_prophet_resolution' ); $records->matching( sub {1} ); $resolutions->matching( sub {1} ); $merges->matching( sub {1} ); %{ $state->{records} } = map { $_->uuid => $_->get_props } $records->items; %{ $state->{merges} } = map { $_->uuid => $_->get_props } $merges->items; %{ $state->{resolutions} } = map { $_->uuid => $_->get_props } $resolutions->items; return $state; } sub dump_history { } sub record_action { my ( $self, $action, @arg ) = @_; $self->arena->record( $self->name, $action, @arg ); } use Test::Exception; sub call_func_ok { my @args = @_; my @ret; lives_and { @ret = call_func(@args); diag("As ".$ENV{'PROPHET_EMAIL'}. " ".join(' ',@{$args[0]})); ok( 1, join( " ", $ENV{'PROPHET_EMAIL'}, @{ $args[0] } ) ); }; return @ret; } sub call_func { Carp::cluck unless ref $_[0]; my @args = @{ shift @_ }; my $cli = Prophet::CLI->new(); my $str = ''; open my $str_fh, '>', \$str; my $old_fh = select($str_fh); my $ret; if (my $p = SVN::Pool->can('new_default')) { $p->('SVN::Pool'); }; $ret = $cli->run_one_command(@args); select($old_fh) if defined $old_fh; return ( $ret, $str, undef ); } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/UUIDGenerator.pm000066400000000000000000000045321160607302300203520ustar00rootroot00000000000000package Prophet::UUIDGenerator; use Any::Moose; use MIME::Base64::URLSafe; =head1 NAME Prophet::UUIDGenerator =head1 DESCRIPTION Creates UUIDs using L. Initially, it created v1 and v3 UUIDs; the new UUID scheme creates v4 and v5 UUIDs, instead. =head1 METHODS =head2 uuid_scheme Gets or sets the UUID scheme; if 1, then creates v1 and v3 UUIDs (for backward compatability with earlier versions of Prophet). If 2, it creates v4 and v5 UUIDs. =cut use UUID::Tiny ':std'; # uuid_scheme: 1 - v1 and v3 uuids. # 2 - v4 and v5 uuids. has uuid_scheme => ( isa => 'Int', is => 'rw' ); =head2 create_str Creates and returns v1 or v4 UUIDs, depending on L. =cut sub create_str { my $self = shift; if ($self->uuid_scheme == 1 ){ return create_uuid_as_string(UUID_V1); } elsif ($self->uuid_scheme == 2) { return create_uuid_as_string(UUID_V4); } } =head2 create_string_from_url URL Creates and returns v3 or v5 UUIDs for the given C, depending on L. =cut sub create_string_from_url { my $self = shift; my $url = shift; local $!; if ($self->uuid_scheme == 1 ){ # Yes, DNS, not URL. We screwed up when we first defined it # and it can't be safely changed once defined. 
create_uuid_as_string(UUID_V3, UUID_NS_DNS, $url); } elsif ($self->uuid_scheme == 2) { create_uuid_as_string(UUID_V5, UUID_NS_URL, $url); } } =head2 from_string =cut sub from_string { my $self = shift; my $str = shift; return string_to_uuid($str); } =head2 to_string =cut sub to_string { my $self = shift; my $uuid = shift; return uuid_to_string($uuid); } =head2 from_safe_b64 =cut sub from_safe_b64 { my $self = shift; my $uuid = shift; return urlsafe_b64decode($uuid); } =head2 to_safe_b64 =cut sub to_safe_b64 { my $self = shift; my $uuid = shift; return urlsafe_b64encode($self->from_string($uuid)); } =head2 version =cut sub version { my $self = shift; my $uuid = shift; return version_of_uuid($uuid); } =head2 set_uuid_scheme =cut sub set_uuid_scheme { my $self = shift; my $uuid = shift; if ( $self->version($uuid) <= 3 ) { $self->uuid_scheme(1); } else { $self->uuid_scheme(2); } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Util.pm000066400000000000000000000064171160607302300166560ustar00rootroot00000000000000package Prophet::Util; use strict; use File::Basename; use File::Spec; use File::Path; use Params::Validate; use Cwd; =head2 updir PATH, DEPTH Strips off the filename in the given path and returns the absolute path of the remaining directory. Default depth is 1. If depth are great than 1, will go up more according to the depth value. =cut sub updir { my $self = shift; my ( $path, $depth ) = validate_pos( @_, 1, { default => 1 } ); die "depth must be positive" unless $depth > 0; my ($file, $dir, undef) = fileparse(File::Spec->rel2abs($path)); $depth-- if $file; # we stripped the file part if ($depth) { $dir = File::Spec->catdir( $dir, ( File::Spec->updir ) x $depth ); } # if $dir doesn't exists in file system, abs_path will return empty return Cwd::abs_path($dir) || $dir; } =head2 slurp FILENAME Reads in the entire file whose absolute path is given by FILENAME and returns its contents, either in a scalar or in an array of lines, depending on the context. =cut sub slurp { my $self = shift; my $abspath = shift; open (my $fh, "<", "$abspath") || die "$abspath: $!"; my @lines = <$fh>; close $fh; return wantarray ? @lines : join('',@lines); } =head2 instantiate_record class => 'record-class-name', uuid => 'record-uuid', app_handle => $self->app_handle Takes the name of a record class (must subclass L), a uuid, and an application handle and returns a new instantiated record object of the given class. =cut sub instantiate_record { my $self = shift; my %args = validate(@_, { class => 1, uuid => 1, app_handle => 1 }); die $args{class} ." is not a valid class " unless (UNIVERSAL::isa($args{class}, 'Prophet::Record')); my $object = $args{class}->new( uuid => $args{uuid}, app_handle => $args{app_handle}); return $object; } =head2 escape_utf8 REF Given a reference to a scalar, escapes special characters (currently just &, <, >, (, ), ", and ') for use in HTML and XML. Not an object routine (call as Prophet::Util::escape_utf8( \$scalar) ). =cut sub escape_utf8 { my $ref = shift; no warnings 'uninitialized'; $$ref =~ s/&/&/g; $$ref =~ s//>/g; $$ref =~ s/\(/(/g; $$ref =~ s/\)/)/g; $$ref =~ s/"/"/g; $$ref =~ s/'/'/g; } sub write_file { my $self = shift; my %args = (@_); #validate is too heavy to be called here # my %args = validate( @_, { file => 1, content => 1 } ); my ( undef, $parent, $filename ) = File::Spec->splitpath($args{file}); unless ( -d $parent ) { eval { mkpath( [$parent] ) }; if ( my $msg = $@ ) { die "Failed to create directory " . $parent . 
" - $msg"; } } open( my $fh, ">", $args{file} ) || die $!; print $fh scalar( $args{'content'} ) ; # can't do "||" as we die if we print 0" || die "Could not write to " . $args{'path'} . " " . $!; close $fh || die $!; } sub hashed_dir_name { my $hash = shift; return ( substr( $hash, 0, 1 ), substr( $hash, 1, 1 ), $hash ); } sub catfile { my $self = shift; # File::Spec::catfile is more correct, but # eats over 10% of prophet app runtime, # which isn't acceptable. return join('/',@_); } 1; prophet-0.750/lib/Prophet/Web/000077500000000000000000000000001160607302300161105ustar00rootroot00000000000000prophet-0.750/lib/Prophet/Web/Field.pm000066400000000000000000000033561160607302300175000ustar00rootroot00000000000000package Prophet::Web::Field; use Any::Moose; has name => ( isa => 'Str', is => 'rw' ); has record => ( isa => 'Prophet::Record', is => 'rw' ); has prop => ( isa => 'Str', is => 'rw' ); has value => ( isa => 'Str', is => 'rw' ); has label => ( isa => 'Str', is => 'rw', default => sub {''}); has id => ( isa => 'Str|Undef', is => 'rw' ); has class => ( isa => 'Str|Undef', is => 'rw' ); has value => ( isa => 'Str|Undef', is => 'rw' ); has type => ( isa => 'Str|Undef', is => 'rw', default => 'text'); sub _render_attr { my $self = shift; my $attr = shift; my $value = $self->$attr() || return ''; Prophet::Util::escape_utf8(\$value); return $attr . '="' . $value . '"'; } sub render_name { my $self = shift; $self->_render_attr('name'); } sub render_id { my $self = shift; $self->_render_attr('id'); } sub render_class { my $self = shift; $self->_render_attr('class'); } sub render_value { my $self = shift; $self->_render_attr('value'); } sub render { my $self = shift; my $output = <render_name]} @{[$self->render_class]}>@{[$self->label]} @{[$self->render_input]} EOF return $output; } sub render_input { my $self = shift; if ($self->type eq 'textarea') { my $value = $self->value() || ''; Prophet::Util::escape_utf8(\$value); return <render_name]} @{[$self->render_id]} @{[$self->render_class]} >@{[$value]} EOF } else { return <render_name]} @{[$self->render_id]} @{[$self->render_class]} @{[$self->render_value]} /> EOF } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Web/FunctionResult.pm000066400000000000000000000013131160607302300214300ustar00rootroot00000000000000package Prophet::Web::FunctionResult; use Any::Moose; =head1 NAME =head1 METHODS =head1 DESCRIPTION =cut =head1 METHODS =cut has class => ( isa => 'Str', is => 'rw'); has function_name => ( isa => 'Str', is => 'rw'); has record_uuid => (isa => 'Str|Undef', is => 'rw'); has success => (isa => 'Bool', is => 'rw'); has message => (isa => 'Str', is => 'rw'); has result => ( is => 'rw', isa => 'HashRef', default => sub { {} }, ); sub exists { exists $_[0]->result->{$_[1]} } sub items { keys %{ $_[0]->result } } sub get { $_[0]->result>{$_[1]} } sub set { $_[0]->result->{$_[1]} = $_[2] } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Web/Menu.pm000066400000000000000000000173111160607302300173550ustar00rootroot00000000000000package Prophet::Web::Menu; use Any::Moose; use URI; has cgi => (isa =>'CGI', is=>'ro'); has label => ( isa => 'Str', is => 'rw'); has parent => ( isa => 'Prophet::Web::Menu|Undef', is => 'rw', weak_ref => 1); has sort_order => ( isa => 'Str', is => 'rw'); has render_children_inline => ( isa => 'Bool', is => 'rw', default => 0); has url => ( isa => 'Str', is => 'bare'); has target => ( isa => 'Str', is => 'rw'); has class => ( isa => 'Str', is => 'rw'); has 
escape_label => ( isa => 'Bool', is => 'rw'); has server => (isa => 'Prophet::Server', is => 'ro', weak_ref => 1, ); =head1 NAME Prophet:Web::Menu - Handle the API for menu navigation =head1 METHODS =head2 new PARAMHASH Creates a new L object. Possible keys in the I are C and no C, renders just the label. =cut sub as_link { my $self = shift; if ( $self->url ) { my $label = $self->label; Prophet::Util::escape_utf8(\$label) if ($self->escape_label); return qq{target ? qq{ target="@{[$self->target]}" } : '' ) . ( $self->class ? qq{ class="@{[$self->class]}" } : '' ) . ">". $label . '' ; } else { return $self->label; } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/lib/Prophet/Web/Result.pm000066400000000000000000000011741160607302300177270ustar00rootroot00000000000000package Prophet::Web::Result; use Any::Moose; use Prophet::Web::FunctionResult; =head1 NAME Prophet::Web::Result =head1 METHODS =head1 DESCRIPTION =cut =head1 METHODS =cut has success => ( isa => 'Bool', is => 'rw'); has message => ( isa => 'Str', is => 'rw'); has functions => ( is => 'rw', isa => 'HashRef', default => sub { {} }, ); sub get { $_[0]->functions->{$_[1]} } sub set { $_[0]->functions->{$_[1]} = $_[2] } sub exists { exists $_[0]->functions->{$_[1]} } sub items { keys %{ $_[0]->functions } } __PACKAGE__->meta->make_immutable; no Any::Moose; 1; prophet-0.750/share/000077500000000000000000000000001160607302300143065ustar00rootroot00000000000000prophet-0.750/share/web/000077500000000000000000000000001160607302300150635ustar00rootroot00000000000000prophet-0.750/share/web/static/000077500000000000000000000000001160607302300163525ustar00rootroot00000000000000prophet-0.750/share/web/static/jquery/000077500000000000000000000000001160607302300176715ustar00rootroot00000000000000prophet-0.750/share/web/static/jquery/css/000077500000000000000000000000001160607302300204615ustar00rootroot00000000000000prophet-0.750/share/web/static/jquery/css/indicator.gif000066400000000000000000000062441160607302300231320ustar00rootroot00000000000000GIF89a óÿÿÿÆÆÆ„„„¶¶¶ššš666VVVØØØäää¼¼¼!ÿ NETSCAPE2.0!þBuilt with GIF Movie Gear 4.0!þMade by AjaxLoad.info!ù , çÈIia¥êÍçbK…$F £RA”T²,¥2Sâ*05//Ém¢p!z“ÁÌ0;$Å0Cœ.I*!üHC(A@oƒ!39T5º\Ñ8)¨ ‡ `Áî´²dwxG=Y gƒwHb†vA=’0 V\œ\ˆ; ¤¥œŸ;¥ª›H¨Š¢«¬³˜0¶µt%‘Hs‰‹rY'¶•¤eõ$ˆ™"ˆ\‡#E1Cn€ÄŽƒÉ~×ÕJ,Ü,Aa•ªUw^4I%PÝÞu Q33{0i1T…Ggwy}%ˆ%'Rœ  ¡  Ž…Œ=¡ª ¦§« £¥§3ž¶G–%¹”pº½0¦ ™³JRo…5Ȇ0IĦmykˆÃxÍTˆ_}È(…×^ââyKŽsµìœé>i_¨%ŽÕînú=ÙâÚÊqØ4eÍ-M¤D!ù , îÈI)*¨êÍ')E¥d]•¦ÃPR A”:!­ûzr’‚“œbw“ %6€"G¤(d$["‡’øJ±ÀFh­aQP`p%†/BFP\cU â ?TÐtW/pG&OtDa_sylD'M˜™ œq Štc˜¥ ¡¢™¦ b ¢2š±D“M :‘µ¹¹ d¡ˆ%À ¢4%s) À»‘uƒƒE3¸ £YU€‹tÚ–ææ‘ÆDŠ$æJiMí<àYà;ŠØ°€˜d<“ O‚tXò^%o/rvl9'L’“–—;†‡9—Ÿ›œ“ ™…£”ª9€% ‹i9³³¨ C¹ "†B B¹µDs Î^Xf}$P × {LÞ?PÊ ”›O4 ×ÐÀEÓóå’›Vë$¸æÊdJÐ#)…pVÂÀ$!ù , ëÈIiR©êͧ"J…d]• ¡RÂZN‰*P*«á;Õ$P{*”N‚Àí\EÐò!1UO2ÝD ™_r6Iöb ¡¤åÔÄH—š8 B—; ²¬"'ªœZÛÚt½’b€K#C'Kˆ‰Œw}?ˆ”‘’K•iz6Š :x‚KAC¨¨Ÿ&}9®tz\ \¶¨ªD5;x ¹¨±Qd( Ö Ë ±KWÖ Ê ŠMBàˈI´éÚˆM=ñˤsøâ¸½8Daƒ¡J`@LG!ù , ïÈIiR©êͧ"J…d]• ¡RÂZN‰*P*«á;Õ$P{*”N‚Àí\EÐò!1UO2ÝD ™_r6Iöb ¡¤åÔÄH—š8 B—; ²¬"'ªœZÛÚt½’b€K#C'Kˆ‰Gziz6ˆ8}z‰’”~Š›%X„K9ƒ:¢¨0}¤% ¨tz\B±¤lc L¢bQ Ç Ñ ÅˆÎ Ñ Ålj ÎÂÝųKÎÞè¸Åˆä ìçÒéÅxÎ(È›PàšX ,ÈéÖ‚|/"!ù , ðÈIiR©êͧ"J…d]• ¡RÂZN‰*P*«á;Õ$P{*”N‚Àí\EÐò!1UO2ÝD ™_r6Iöb ¡¤åÔÄH—š8 B—; ²¬"'ªœZÛÚt½’b€K#C'Kˆ‰Gziz6ˆ8}z‰’”~Š›%…:„A/ C} ¦¦†u\¯ ³h}b¥¦ DÂÃ]=¦¨ ¨Ö ÈV)Ó Ö ÚŠÓÐâÈ9CÓãëDæKè õí¼K¦…¢u ·Ç*00SÊtD!ù , ëÈIiR©êͧ"J…d]• ¡RÂZN‰*P*«á;Õ$P{*”N‚Àí\EÐò!1UO2ÝD ™_r6Iöb ¡¤åÔÄH—š8 B—; ²¬"'ªœZÛÚt½’b€K#C'Kˆ‰Gz ‘‰z5 ‘— “”˜ŠŸC…: „A/ C}ªª†u\³ ·Eh}b©ª6Å[=ª¥¸¥×Wx&)Ô×ÒI9ˆÔ¬á@oCÔêT?KÞÆéØdÛïò]øÁB7¡À‚ 6ЫD!ù , èÈIiR©êͧ"J…d]• ¡RÂZN‰*P*«á;Õ$P{*”N‚Àí\EÐò!1UO2ÝD 
™_r6Iö Æ€ÔÄH—š03ª³”hÕ¸ƒ›aÀ—j U {CIkmbK#‡cK‘’€8 „{a’•8™n›•˜™“¥‡‚V:ˆ/q:M ¯¯Cu€~·¸‰Eh³k®¯6 ½[_¯±ƒ6P.]ú6©ð!‡)Vˆ!ù , ðÈIiR©êͧ"J…d]U ¡RÂZN ÔJÀjøN2sK6 ±› d‹I€È)  LØH°W¡G 6 ÊKX¦ƒì ±’.6¢d°¨~z“hÙÂuur/6 X5ƒI;_†t O#E {O››ˆ9V¢£œž9£¨4ž¡©œ±›—;V–C/ €¹6»Ã˜~*½'ÊMo¸º»€nÎÇbXÂ:~]+V*ÍmåK_àOÑrKñ³N@.ê›Ñdù~ÎqЦäD¢BÖ‹ 5D;prophet-0.750/share/web/static/jquery/css/jquery.autocomplete.css000066400000000000000000000015621160607302300252160ustar00rootroot00000000000000.ac_results { border-right: 1px solid #666; border-bottom: 1px solid #666; background-color: white; overflow: hidden; z-index: 99999; } .ac_results ul { width: 100%; list-style-position: outside; list-style: none; padding: 0; margin: 0; } .ac_results li { /*margin: 0px; padding: 2px 5px; */ padding: 0.5em; cursor: default; display: block; /* if width will be 100% horizontal scrollbar will apear when scroll mode will be used */ /*width: 100%;*/ font: menu; font-size: 0.7em; /* it is very important, if line-height not setted or setted in relative units scroll will be broken in firefox */ overflow: hidden; } .ac_loading { /* background: white url('indicator.gif') right center no-repeat; */ } .ac_odd { background-color: #efefef; } .ac_even { background-color: #fefefe; } .ac_over { background-color: #ccc; color: #000; } prophet-0.750/share/web/static/jquery/css/superfish-navbar.css000066400000000000000000000044051160607302300244550ustar00rootroot00000000000000 /*** adding the class sf-navbar in addition to sf-menu creates an all-horizontal nav-bar menu ***/ .sf-navbar { background: #BDD2FF; height: 2.5em; padding-bottom: 2.5em; position: relative; } .sf-navbar li { background: #AABDE6; position: static; } .sf-navbar a { border-top: none; } .sf-navbar li ul { width: 44em; /*IE6 soils itself without this*/ } .sf-navbar li li { background: #BDD2FF; position: relative; } .sf-navbar li li ul { width: 13em; } .sf-navbar li li li { width: 100%; } .sf-navbar ul li { width: auto; float: left; } .sf-navbar a, .sf-navbar a:visited { border: none; } .sf-navbar li.current { background: #BDD2FF; } .sf-navbar li:hover, .sf-navbar li.sfHover, .sf-navbar li li.current, .sf-navbar a:focus, .sf-navbar a:hover, .sf-navbar a:active { background: #BDD2FF; } .sf-navbar ul li:hover, .sf-navbar ul li.sfHover, ul.sf-navbar ul li:hover li, ul.sf-navbar ul li.sfHover li, .sf-navbar ul a:focus, .sf-navbar ul a:hover, .sf-navbar ul a:active { background: #D1DFFF; } ul.sf-navbar li li li:hover, ul.sf-navbar li li li.sfHover, .sf-navbar li li.current li.current, .sf-navbar ul li li a:focus, .sf-navbar ul li li a:hover, .sf-navbar ul li li a:active { background: #E6EEFF; } ul.sf-navbar .current ul, ul.sf-navbar ul li:hover ul, ul.sf-navbar ul li.sfHover ul { left: 0; top: 2.5em; /* match top ul list item height */ } ul.sf-navbar .current ul ul { top: -999em; } .sf-navbar li li.current > a { font-weight: bold; } /*** point all arrows down ***/ /* point right for anchors in subs */ .sf-navbar ul .sf-sub-indicator { background-position: -10px -100px; } .sf-navbar ul a > .sf-sub-indicator { background-position: 0 -100px; } /* apply hovers to modern browsers */ .sf-navbar ul a:focus > .sf-sub-indicator, .sf-navbar ul a:hover > .sf-sub-indicator, .sf-navbar ul a:active > .sf-sub-indicator, .sf-navbar ul li:hover > a > .sf-sub-indicator, .sf-navbar ul li.sfHover > a > .sf-sub-indicator { background-position: -10px -100px; /* arrow hovers for modern browsers*/ } /*** remove shadow on first submenu ***/ .sf-navbar > li > ul { background: transparent; padding: 0; -moz-border-radius-bottomleft: 0; 
-moz-border-radius-topright: 0; -webkit-border-top-right-radius: 0; -webkit-border-bottom-left-radius: 0; }prophet-0.750/share/web/static/jquery/css/superfish-vertical.css000066400000000000000000000016311160607302300250130ustar00rootroot00000000000000/*** adding sf-vertical in addition to sf-menu creates a vertical menu ***/ .sf-vertical, .sf-vertical li { width: 10em; } /* this lacks ul at the start of the selector, so the styles from the main CSS file override it where needed */ .sf-vertical li:hover ul, .sf-vertical li.sfHover ul { left: 10em; /* match ul width */ top: 0; } /*** alter arrow directions ***/ .sf-vertical .sf-sub-indicator { background-position: -10px 0; } /* IE6 gets solid image only */ .sf-vertical a > .sf-sub-indicator { background-position: 0 0; } /* use translucent arrow for modern browsers*/ /* hover arrow direction for modern browsers*/ .sf-vertical a:focus > .sf-sub-indicator, .sf-vertical a:hover > .sf-sub-indicator, .sf-vertical a:active > .sf-sub-indicator, .sf-vertical li:hover > a > .sf-sub-indicator, .sf-vertical li.sfHover > a > .sf-sub-indicator { background-position: -10px 0; /* arrow hovers for modern browsers*/ }prophet-0.750/share/web/static/jquery/css/superfish.css000066400000000000000000000063331160607302300232100ustar00rootroot00000000000000 /*** ESSENTIAL STYLES ***/ .sf-menu, .sf-menu * { margin: 0; padding: 0; list-style: none; } .sf-menu { line-height: 1.0; } .sf-menu ul { position: absolute; top: -999em; width: 10em; /* left offset of submenus need to match (see below) */ } .sf-menu ul li { width: 100%; } .sf-menu li:hover { visibility: inherit; /* fixes IE7 'sticky bug' */ } .sf-menu li { float: left; position: relative; } .sf-menu a { display: block; position: relative; } .sf-menu li:hover ul, .sf-menu li.sfHover ul { left: 0; top: 2.5em; /* match top ul list item height */ z-index: 99; } ul.sf-menu li:hover li ul, ul.sf-menu li.sfHover li ul { top: -999em; } ul.sf-menu li li:hover ul, ul.sf-menu li li.sfHover ul { left: 10em; /* match ul width */ top: 0; } ul.sf-menu li li:hover li ul, ul.sf-menu li li.sfHover li ul { top: -999em; } ul.sf-menu li li li:hover ul, ul.sf-menu li li li.sfHover ul { left: 10em; /* match ul width */ top: 0; } /*** DEMO SKIN ***/ .sf-menu { float: left; margin-bottom: 1em; } .sf-menu a { border-left: 1px solid #fff; border-top: 1px solid #CFDEFF; padding: .75em 1em; text-decoration:none; } .sf-menu a, .sf-menu a:visited { /* visited pseudo selector so IE6 applies text colour*/ color: #13a; } .sf-menu li { background: #BDD2FF; } .sf-menu li li { background: #AABDE6; } .sf-menu li li li { background: #9AAEDB; } .sf-menu li:hover, .sf-menu li.sfHover, .sf-menu a:focus, .sf-menu a:hover, .sf-menu a:active { background: #CFDEFF; outline: 0; } /*** arrows **/ .sf-menu a.sf-with-ul { padding-right: 2.25em; min-width: 1px; /* trigger IE7 hasLayout so spans position accurately */ } .sf-sub-indicator { position: absolute; display: block; right: .75em; top: 1.05em; /* IE6 only */ width: 10px; height: 10px; text-indent: -999em; overflow: hidden; background: url('../images/arrows-ffffff.png') no-repeat -10px -100px; /* 8-bit indexed alpha png. 
IE6 gets solid image only */ } a > .sf-sub-indicator { /* give all except IE6 the correct values */ top: .8em; background-position: 0 -100px; /* use translucent arrow for modern browsers*/ } /* apply hovers to modern browsers */ a:focus > .sf-sub-indicator, a:hover > .sf-sub-indicator, a:active > .sf-sub-indicator, li:hover > a > .sf-sub-indicator, li.sfHover > a > .sf-sub-indicator { background-position: -10px -100px; /* arrow hovers for modern browsers*/ } /* point right for anchors in subs */ .sf-menu ul .sf-sub-indicator { background-position: -10px 0; } .sf-menu ul a > .sf-sub-indicator { background-position: 0 0; } /* apply hovers to modern browsers */ .sf-menu ul a:focus > .sf-sub-indicator, .sf-menu ul a:hover > .sf-sub-indicator, .sf-menu ul a:active > .sf-sub-indicator, .sf-menu ul li:hover > a > .sf-sub-indicator, .sf-menu ul li.sfHover > a > .sf-sub-indicator { background-position: -10px 0; /* arrow hovers for modern browsers*/ } /*** shadows for all but IE6 ***/ .sf-shadow ul { background: url('../images/shadow.png') no-repeat bottom right; padding: 0 8px 9px 0; -moz-border-radius-bottomleft: 17px; -moz-border-radius-topright: 17px; -webkit-border-top-right-radius: 17px; -webkit-border-bottom-left-radius: 17px; } .sf-shadow ul.sf-shadow-off { background: transparent; } prophet-0.750/share/web/static/jquery/css/tablesorter/000077500000000000000000000000001160607302300230075ustar00rootroot00000000000000prophet-0.750/share/web/static/jquery/css/tablesorter/asc.gif000066400000000000000000000000661160607302300242460ustar00rootroot00000000000000GIF89a€#-0ÿÿÿ!ù, Œ  èÏÚ›gÑk$-;prophet-0.750/share/web/static/jquery/css/tablesorter/bg.gif000066400000000000000000000001001160607302300240550ustar00rootroot00000000000000GIF89a €#-0ÿÿÿ!ù, Œ€Ë ÚbxT2Š­ÞW>e`÷Œ‰U;prophet-0.750/share/web/static/jquery/css/tablesorter/desc.gif000066400000000000000000000000661160607302300244160ustar00rootroot00000000000000GIF89a€#-0ÿÿÿ!ù, ŒÉ­°œƒT2ŠY;prophet-0.750/share/web/static/jquery/css/tablesorter/style.css000066400000000000000000000016201160607302300246600ustar00rootroot00000000000000/* tables */ table.tablesorter { font-family:arial; background-color: #CDCDCD; margin:10px 0pt 15px; font-size: 8pt; width: 100%; text-align: left; } table.tablesorter thead tr th, table.tablesorter tfoot tr th { background-color: #e6EEEE; border: 1px solid #FFF; font-size: 8pt; padding: 4px; } table.tablesorter thead tr .header { background-image: url(bg.gif); background-repeat: no-repeat; background-position: center right; cursor: pointer; } table.tablesorter tbody td { color: #3D3D3D; padding: 4px; background-color: #FFF; vertical-align: top; } table.tablesorter tbody tr.odd td { background-color:#F0F0F6; } table.tablesorter thead tr .headerSortUp { background-image: url(asc.gif); } table.tablesorter thead tr .headerSortDown { background-image: url(desc.gif); } table.tablesorter thead tr .headerSortDown, table.tablesorter thead tr .headerSortUp { background-color: #8dbdd8; } prophet-0.750/share/web/static/jquery/images/000077500000000000000000000000001160607302300211365ustar00rootroot00000000000000prophet-0.750/share/web/static/jquery/images/arrows-cccccc.png000066400000000000000000000004311160607302300243560ustar00rootroot00000000000000‰PNG  IHDRn'03ª pHYs  ÒÝ~üËIDAThíšAÂ0 [Þš?ðþ¿BdË€J"$“CÅ«q4=ºÛöËUk=á·Üµ—;½V¡o@5X¦Àh8 K)?­–Û-ÓÎÀM³0»¥¿W¾®CØ`À0€ `À0€ `À0€ `ÀÀ÷îƒæ#S[ˆ[®û÷€µöÉõçÍ#hËíT‡ av‹ *˜êO@DhSF+B 
ö¨$ÏMÊ~¡~ø÷›ŠhK^VÖ„IEND®B`‚prophet-0.750/share/web/static/jquery/images/arrows-ffffff.png000066400000000000000000000003641160607302300244050ustar00rootroot00000000000000‰PNG  IHDRnZ^Û9sBITÛáOà PLTEÿÿÿÿÿÿÿÿÿŽJåþtRNSwÿ· Äb pHYs  ÒÝ~ütEXtSoftwareAdobe Fireworks CS3˜ÖFKIDAT(‘c`F éÀ $´€dƒVc(ƒÖ"QV? Fñ@44hÕ*ÖP‡U+ éŠ!„«A€ E!^‡ 5pÆþIEND®B`‚prophet-0.750/share/web/static/jquery/images/shadow.png000066400000000000000000000032421160607302300231320ustar00rootroot00000000000000‰PNG  IHDRž\c½ ¦tEXtSoftwareAdobe ImageReadyqÉe<DIDATxÚíÚËnÛ0EÑC‹JòÿÛêÁDÉ”ãfÐa±Ø<°q¯¥”–ïJ€ÑÛNÚ=ÝÀ =‹©C:%Ï?àíð)­Ýã)G4缆ÓúIÉÑO¶G?Ï„€g6{?­¤ñ”cæ<òÈ”š)S8ãÙ³eËš$Ùå­^?y™2gΜÚóŽxölY³ô*Z®µ­ôÉS3ç3Ÿ™S͸Í5K×§’¤½NžÏ|å#s&ñÀÏ–%SÒ—·^Fíé””L©™ó‘¯Ì7¸-mSÒ:jiI½ )ý†Áœ9}qŽ¥íLçê¢ 7 žwܦTñÀO²eä kÛ3Ÿr{Þ´äöüóꢾ|±Ü¿ñpeà߈Äâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâñ¸ ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@<  ˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâÄâñ€x@<€x@< ˆˆÄâñ¸ ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄˆÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñ€xñ€x@<  ˆÄâÄâñÀÿ¤¾|nÃ~h¢¾ùÊž={’âºÁUÄ·„ê-={¶lY“<Ä=ž5[¶!¡#žv6Ò²gËš%S’M<0”±dÉš-û™Në“ç¨iLgJô:¶,ùÝó¹fO}©ë‘ôWñÀÏž5K~egOMÒ®½®$Ù³d²´ÁËâ¶öÕíúÝ3NžãuëéˆÆÙsÜJ»Ož–Ò’!žÒð¼'pÞ²ÞÓÒZrt“”’\Ñïg>ã“У3ž¤äŠF8ð·€ÎrÆxz3Šy÷/l ‚ïcçÕ}kï—f¿áIEND®B`‚prophet-0.750/share/web/static/jquery/js/000077500000000000000000000000001160607302300203055ustar00rootroot00000000000000prophet-0.750/share/web/static/jquery/js/hoverIntent.js000066400000000000000000000061461160607302300231570ustar00rootroot00000000000000(function($){ /* hoverIntent by Brian Cherne */ $.fn.hoverIntent = function(f,g) { // default configuration options var cfg = { sensitivity: 7, interval: 100, timeout: 0 }; // override configuration options with user supplied object cfg = $.extend(cfg, g ? { over: f, out: g } : f ); // instantiate variables // cX, cY = current X and Y position of mouse, updated by mousemove event // pX, pY = previous X and Y position of mouse, set by mouseover and polling interval var cX, cY, pX, pY; // A private function for getting mouse position var track = function(ev) { cX = ev.pageX; cY = ev.pageY; }; // A private function for comparing current and previous mouse position var compare = function(ev,ob) { ob.hoverIntent_t = clearTimeout(ob.hoverIntent_t); // compare mouse positions to see if they've crossed the threshold if ( ( Math.abs(pX-cX) + Math.abs(pY-cY) ) < cfg.sensitivity ) { $(ob).unbind("mousemove",track); // set hoverIntent state to true (so mouseOut can be called) ob.hoverIntent_s = 1; return cfg.over.apply(ob,[ev]); } else { // set previous coordinates for next time pX = cX; pY = cY; // use self-calling timeout, guarantees intervals are spaced out properly (avoids JavaScript timer bugs) ob.hoverIntent_t = setTimeout( function(){compare(ev, ob);} , cfg.interval ); } }; // A private function for delaying the mouseOut function var delay = function(ev,ob) { ob.hoverIntent_t = clearTimeout(ob.hoverIntent_t); ob.hoverIntent_s = 0; return cfg.out.apply(ob,[ev]); }; // A private function for handling mouse 'hovering' var handleHover = function(e) { // next three lines copied from jQuery.hover, ignore children onMouseOver/onMouseOut var p = (e.type == "mouseover" ? 
e.fromElement : e.toElement) || e.relatedTarget; while ( p && p != this ) { try { p = p.parentNode; } catch(e) { p = this; } } if ( p == this ) { return false; } // copy objects to be passed into t (required for event object to be passed in IE) var ev = jQuery.extend({},e); var ob = this; // cancel hoverIntent timer if it exists if (ob.hoverIntent_t) { ob.hoverIntent_t = clearTimeout(ob.hoverIntent_t); } // else e.type == "onmouseover" if (e.type == "mouseover") { // set "previous" X and Y position based on initial entry point pX = ev.pageX; pY = ev.pageY; // update "current" X and Y position based on mousemove $(ob).bind("mousemove",track); // start polling interval (self-calling timeout) to compare mouse coordinates over time if (ob.hoverIntent_s != 1) { ob.hoverIntent_t = setTimeout( function(){compare(ev,ob);} , cfg.interval );} // else e.type == "onmouseout" } else { // unbind expensive mousemove event $(ob).unbind("mousemove",track); // if hoverIntent state is true, then call the mouseOut function after the specified delay if (ob.hoverIntent_s == 1) { ob.hoverIntent_t = setTimeout( function(){delay(ev,ob);} , cfg.timeout );} } }; // bind the function to the two event listeners return this.mouseover(handleHover).mouseout(handleHover); }; })(jQuery);prophet-0.750/share/web/static/jquery/js/jquery-1.2.6.min.js000066400000000000000000001547361160607302300234260ustar00rootroot00000000000000/* * jQuery 1.2.6 - New Wave Javascript * * Copyright (c) 2008 John Resig (jquery.com) * Dual licensed under the MIT (MIT-LICENSE.txt) * and GPL (GPL-LICENSE.txt) licenses. * * $Date: 2008-05-24 14:22:17 -0400 (Sat, 24 May 2008) $ * $Rev: 5685 $ */ (function(){var _jQuery=window.jQuery,_$=window.$;var jQuery=window.jQuery=window.$=function(selector,context){return new jQuery.fn.init(selector,context);};var quickExpr=/^[^<]*(<(.|\s)+>)[^>]*$|^#(\w+)$/,isSimple=/^.[^:#\[\.]*$/,undefined;jQuery.fn=jQuery.prototype={init:function(selector,context){selector=selector||document;if(selector.nodeType){this[0]=selector;this.length=1;return this;}if(typeof selector=="string"){var match=quickExpr.exec(selector);if(match&&(match[1]||!context)){if(match[1])selector=jQuery.clean([match[1]],context);else{var elem=document.getElementById(match[3]);if(elem){if(elem.id!=match[3])return jQuery().find(selector);return jQuery(elem);}selector=[];}}else return jQuery(context).find(selector);}else if(jQuery.isFunction(selector))return jQuery(document)[jQuery.fn.ready?"ready":"load"](selector);return this.setArray(jQuery.makeArray(selector));},jquery:"1.2.6",size:function(){return this.length;},length:0,get:function(num){return num==undefined?jQuery.makeArray(this):this[num];},pushStack:function(elems){var ret=jQuery(elems);ret.prevObject=this;return ret;},setArray:function(elems){this.length=0;Array.prototype.push.apply(this,elems);return this;},each:function(callback,args){return jQuery.each(this,callback,args);},index:function(elem){var ret=-1;return jQuery.inArray(elem&&elem.jquery?elem[0]:elem,this);},attr:function(name,value,type){var options=name;if(name.constructor==String)if(value===undefined)return this[0]&&jQuery[type||"attr"](this[0],name);else{options={};options[name]=value;}return this.each(function(i){for(name in options)jQuery.attr(type?this.style:this,name,jQuery.prop(this,options[name],type,i,name));});},css:function(key,value){if((key=='width'||key=='height')&&parseFloat(value)<0)value=undefined;return this.attr(key,value,"curCSS");},text:function(text){if(typeof text!="object"&&text!=null)return 
this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(text));var ret="";jQuery.each(text||this,function(){jQuery.each(this.childNodes,function(){if(this.nodeType!=8)ret+=this.nodeType!=1?this.nodeValue:jQuery.fn.text([this]);});});return ret;},wrapAll:function(html){if(this[0])jQuery(html,this[0].ownerDocument).clone().insertBefore(this[0]).map(function(){var elem=this;while(elem.firstChild)elem=elem.firstChild;return elem;}).append(this);return this;},wrapInner:function(html){return this.each(function(){jQuery(this).contents().wrapAll(html);});},wrap:function(html){return this.each(function(){jQuery(this).wrapAll(html);});},append:function(){return this.domManip(arguments,true,false,function(elem){if(this.nodeType==1)this.appendChild(elem);});},prepend:function(){return this.domManip(arguments,true,true,function(elem){if(this.nodeType==1)this.insertBefore(elem,this.firstChild);});},before:function(){return this.domManip(arguments,false,false,function(elem){this.parentNode.insertBefore(elem,this);});},after:function(){return this.domManip(arguments,false,true,function(elem){this.parentNode.insertBefore(elem,this.nextSibling);});},end:function(){return this.prevObject||jQuery([]);},find:function(selector){var elems=jQuery.map(this,function(elem){return jQuery.find(selector,elem);});return this.pushStack(/[^+>] [^+>]/.test(selector)||selector.indexOf("..")>-1?jQuery.unique(elems):elems);},clone:function(events){var ret=this.map(function(){if(jQuery.browser.msie&&!jQuery.isXMLDoc(this)){var clone=this.cloneNode(true),container=document.createElement("div");container.appendChild(clone);return jQuery.clean([container.innerHTML])[0];}else return this.cloneNode(true);});var clone=ret.find("*").andSelf().each(function(){if(this[expando]!=undefined)this[expando]=null;});if(events===true)this.find("*").andSelf().each(function(i){if(this.nodeType==3)return;var events=jQuery.data(this,"events");for(var type in events)for(var handler in events[type])jQuery.event.add(clone[i],type,events[type][handler],events[type][handler].data);});return ret;},filter:function(selector){return this.pushStack(jQuery.isFunction(selector)&&jQuery.grep(this,function(elem,i){return selector.call(elem,i);})||jQuery.multiFilter(selector,this));},not:function(selector){if(selector.constructor==String)if(isSimple.test(selector))return this.pushStack(jQuery.multiFilter(selector,this,true));else selector=jQuery.multiFilter(selector,this);var isArrayLike=selector.length&&selector[selector.length-1]!==undefined&&!selector.nodeType;return this.filter(function(){return isArrayLike?jQuery.inArray(this,selector)<0:this!=selector;});},add:function(selector){return this.pushStack(jQuery.unique(jQuery.merge(this.get(),typeof selector=='string'?jQuery(selector):jQuery.makeArray(selector))));},is:function(selector){return!!selector&&jQuery.multiFilter(selector,this).length>0;},hasClass:function(selector){return this.is("."+selector);},val:function(value){if(value==undefined){if(this.length){var elem=this[0];if(jQuery.nodeName(elem,"select")){var index=elem.selectedIndex,values=[],options=elem.options,one=elem.type=="select-one";if(index<0)return null;for(var i=one?index:0,max=one?index+1:options.length;i=0||jQuery.inArray(this.name,value)>=0);else if(jQuery.nodeName(this,"select")){var values=jQuery.makeArray(value);jQuery("option",this).each(function(){this.selected=(jQuery.inArray(this.value,values)>=0||jQuery.inArray(this.text,values)>=0);});if(!values.length)this.selectedIndex=-1;}else 
this.value=value;});},html:function(value){return value==undefined?(this[0]?this[0].innerHTML:null):this.empty().append(value);},replaceWith:function(value){return this.after(value).remove();},eq:function(i){return this.slice(i,i+1);},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments));},map:function(callback){return this.pushStack(jQuery.map(this,function(elem,i){return callback.call(elem,i,elem);}));},andSelf:function(){return this.add(this.prevObject);},data:function(key,value){var parts=key.split(".");parts[1]=parts[1]?"."+parts[1]:"";if(value===undefined){var data=this.triggerHandler("getData"+parts[1]+"!",[parts[0]]);if(data===undefined&&this.length)data=jQuery.data(this[0],key);return data===undefined&&parts[1]?this.data(parts[0]):data;}else return this.trigger("setData"+parts[1]+"!",[parts[0],value]).each(function(){jQuery.data(this,key,value);});},removeData:function(key){return this.each(function(){jQuery.removeData(this,key);});},domManip:function(args,table,reverse,callback){var clone=this.length>1,elems;return this.each(function(){if(!elems){elems=jQuery.clean(args,this.ownerDocument);if(reverse)elems.reverse();}var obj=this;if(table&&jQuery.nodeName(this,"table")&&jQuery.nodeName(elems[0],"tr"))obj=this.getElementsByTagName("tbody")[0]||this.appendChild(this.ownerDocument.createElement("tbody"));var scripts=jQuery([]);jQuery.each(elems,function(){var elem=clone?jQuery(this).clone(true)[0]:this;if(jQuery.nodeName(elem,"script"))scripts=scripts.add(elem);else{if(elem.nodeType==1)scripts=scripts.add(jQuery("script",elem).remove());callback.call(obj,elem);}});scripts.each(evalScript);});}};jQuery.fn.init.prototype=jQuery.fn;function evalScript(i,elem){if(elem.src)jQuery.ajax({url:elem.src,async:false,dataType:"script"});else jQuery.globalEval(elem.text||elem.textContent||elem.innerHTML||"");if(elem.parentNode)elem.parentNode.removeChild(elem);}function now(){return+new Date;}jQuery.extend=jQuery.fn.extend=function(){var target=arguments[0]||{},i=1,length=arguments.length,deep=false,options;if(target.constructor==Boolean){deep=target;target=arguments[1]||{};i=2;}if(typeof target!="object"&&typeof target!="function")target={};if(length==i){target=this;--i;}for(;i-1;}},swap:function(elem,options,callback){var old={};for(var name in options){old[name]=elem.style[name];elem.style[name]=options[name];}callback.call(elem);for(var name in options)elem.style[name]=old[name];},css:function(elem,name,force){if(name=="width"||name=="height"){var val,props={position:"absolute",visibility:"hidden",display:"block"},which=name=="width"?["Left","Right"]:["Top","Bottom"];function getWH(){val=name=="width"?elem.offsetWidth:elem.offsetHeight;var padding=0,border=0;jQuery.each(which,function(){padding+=parseFloat(jQuery.curCSS(elem,"padding"+this,true))||0;border+=parseFloat(jQuery.curCSS(elem,"border"+this+"Width",true))||0;});val-=Math.round(padding+border);}if(jQuery(elem).is(":visible"))getWH();else jQuery.swap(elem,props,getWH);return Math.max(0,val);}return jQuery.curCSS(elem,name,force);},curCSS:function(elem,name,force){var ret,style=elem.style;function color(elem){if(!jQuery.browser.safari)return false;var ret=defaultView.getComputedStyle(elem,null);return!ret||ret.getPropertyValue("color")=="";}if(name=="opacity"&&jQuery.browser.msie){ret=jQuery.attr(style,"opacity");return ret==""?"1":ret;}if(jQuery.browser.opera&&name=="display"){var save=style.outline;style.outline="0 solid 
black";style.outline=save;}if(name.match(/float/i))name=styleFloat;if(!force&&style&&style[name])ret=style[name];else if(defaultView.getComputedStyle){if(name.match(/float/i))name="float";name=name.replace(/([A-Z])/g,"-$1").toLowerCase();var computedStyle=defaultView.getComputedStyle(elem,null);if(computedStyle&&!color(elem))ret=computedStyle.getPropertyValue(name);else{var swap=[],stack=[],a=elem,i=0;for(;a&&color(a);a=a.parentNode)stack.unshift(a);for(;i]*?)\/>/g,function(all,front,tag){return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?all:front+">";});var tags=jQuery.trim(elem).toLowerCase(),div=context.createElement("div");var wrap=!tags.indexOf("",""]||!tags.indexOf("",""]||tags.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"","
"]||!tags.indexOf("",""]||(!tags.indexOf("",""]||!tags.indexOf("",""]||jQuery.browser.msie&&[1,"div
","
"]||[0,"",""];div.innerHTML=wrap[1]+elem+wrap[2];while(wrap[0]--)div=div.lastChild;if(jQuery.browser.msie){var tbody=!tags.indexOf(""&&tags.indexOf("=0;--j)if(jQuery.nodeName(tbody[j],"tbody")&&!tbody[j].childNodes.length)tbody[j].parentNode.removeChild(tbody[j]);if(/^\s/.test(elem))div.insertBefore(context.createTextNode(elem.match(/^\s*/)[0]),div.firstChild);}elem=jQuery.makeArray(div.childNodes);}if(elem.length===0&&(!jQuery.nodeName(elem,"form")&&!jQuery.nodeName(elem,"select")))return;if(elem[0]==undefined||jQuery.nodeName(elem,"form")||elem.options)ret.push(elem);else ret=jQuery.merge(ret,elem);});return ret;},attr:function(elem,name,value){if(!elem||elem.nodeType==3||elem.nodeType==8)return undefined;var notxml=!jQuery.isXMLDoc(elem),set=value!==undefined,msie=jQuery.browser.msie;name=notxml&&jQuery.props[name]||name;if(elem.tagName){var special=/href|src|style/.test(name);if(name=="selected"&&jQuery.browser.safari)elem.parentNode.selectedIndex;if(name in elem&¬xml&&!special){if(set){if(name=="type"&&jQuery.nodeName(elem,"input")&&elem.parentNode)throw"type property can't be changed";elem[name]=value;}if(jQuery.nodeName(elem,"form")&&elem.getAttributeNode(name))return elem.getAttributeNode(name).nodeValue;return elem[name];}if(msie&¬xml&&name=="style")return jQuery.attr(elem.style,"cssText",value);if(set)elem.setAttribute(name,""+value);var attr=msie&¬xml&&special?elem.getAttribute(name,2):elem.getAttribute(name);return attr===null?undefined:attr;}if(msie&&name=="opacity"){if(set){elem.zoom=1;elem.filter=(elem.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(value)+''=="NaN"?"":"alpha(opacity="+value*100+")");}return elem.filter&&elem.filter.indexOf("opacity=")>=0?(parseFloat(elem.filter.match(/opacity=([^)]*)/)[1])/100)+'':"";}name=name.replace(/-([a-z])/ig,function(all,letter){return letter.toUpperCase();});if(set)elem[name]=value;return elem[name];},trim:function(text){return(text||"").replace(/^\s+|\s+$/g,"");},makeArray:function(array){var ret=[];if(array!=null){var i=array.length;if(i==null||array.split||array.setInterval||array.call)ret[0]=array;else while(i)ret[--i]=array[i];}return ret;},inArray:function(elem,array){for(var i=0,length=array.length;i*",this).remove();while(this.firstChild)this.removeChild(this.firstChild);}},function(name,fn){jQuery.fn[name]=function(){return this.each(fn,arguments);};});jQuery.each(["Height","Width"],function(i,name){var type=name.toLowerCase();jQuery.fn[type]=function(size){return this[0]==window?jQuery.browser.opera&&document.body["client"+name]||jQuery.browser.safari&&window["inner"+name]||document.compatMode=="CSS1Compat"&&document.documentElement["client"+name]||document.body["client"+name]:this[0]==document?Math.max(Math.max(document.body["scroll"+name],document.documentElement["scroll"+name]),Math.max(document.body["offset"+name],document.documentElement["offset"+name])):size==undefined?(this.length?jQuery.css(this[0],type):null):this.css(type,size.constructor==String?size:size+"px");};});function num(elem,prop){return elem[0]&&parseInt(jQuery.curCSS(elem[0],prop,true),10)||0;}var chars=jQuery.browser.safari&&parseInt(jQuery.browser.version)<417?"(?:[\\w*_-]|\\\\.)":"(?:[\\w\u0128-\uFFFF*_-]|\\\\.)",quickChild=new RegExp("^>\\s*("+chars+"+)"),quickID=new RegExp("^("+chars+"+)(#)("+chars+"+)"),quickClass=new RegExp("^([#.]?)("+chars+"*)");jQuery.extend({expr:{"":function(a,i,m){return m[2]=="*"||jQuery.nodeName(a,m[2]);},"#":function(a,i,m){return a.getAttribute("id")==m[2];},":":{lt:function(a,i,m){return 
im[3]-0;},nth:function(a,i,m){return m[3]-0==i;},eq:function(a,i,m){return m[3]-0==i;},first:function(a,i){return i==0;},last:function(a,i,m,r){return i==r.length-1;},even:function(a,i){return i%2==0;},odd:function(a,i){return i%2;},"first-child":function(a){return a.parentNode.getElementsByTagName("*")[0]==a;},"last-child":function(a){return jQuery.nth(a.parentNode.lastChild,1,"previousSibling")==a;},"only-child":function(a){return!jQuery.nth(a.parentNode.lastChild,2,"previousSibling");},parent:function(a){return a.firstChild;},empty:function(a){return!a.firstChild;},contains:function(a,i,m){return(a.textContent||a.innerText||jQuery(a).text()||"").indexOf(m[3])>=0;},visible:function(a){return"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden";},hidden:function(a){return"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden";},enabled:function(a){return!a.disabled;},disabled:function(a){return a.disabled;},checked:function(a){return a.checked;},selected:function(a){return a.selected||jQuery.attr(a,"selected");},text:function(a){return"text"==a.type;},radio:function(a){return"radio"==a.type;},checkbox:function(a){return"checkbox"==a.type;},file:function(a){return"file"==a.type;},password:function(a){return"password"==a.type;},submit:function(a){return"submit"==a.type;},image:function(a){return"image"==a.type;},reset:function(a){return"reset"==a.type;},button:function(a){return"button"==a.type||jQuery.nodeName(a,"button");},input:function(a){return/input|select|textarea|button/i.test(a.nodeName);},has:function(a,i,m){return jQuery.find(m[3],a).length;},header:function(a){return/h\d/i.test(a.nodeName);},animated:function(a){return jQuery.grep(jQuery.timers,function(fn){return a==fn.elem;}).length;}}},parse:[/^(\[) *@?([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/,/^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/,new RegExp("^([:.#]*)("+chars+"+)")],multiFilter:function(expr,elems,not){var old,cur=[];while(expr&&expr!=old){old=expr;var f=jQuery.filter(expr,elems,not);expr=f.t.replace(/^\s*,\s*/,"");cur=not?elems=f.r:jQuery.merge(cur,f.r);}return cur;},find:function(t,context){if(typeof t!="string")return[t];if(context&&context.nodeType!=1&&context.nodeType!=9)return[];context=context||document;var ret=[context],done=[],last,nodeName;while(t&&last!=t){var r=[];last=t;t=jQuery.trim(t);var foundToken=false,re=quickChild,m=re.exec(t);if(m){nodeName=m[1].toUpperCase();for(var i=0;ret[i];i++)for(var c=ret[i].firstChild;c;c=c.nextSibling)if(c.nodeType==1&&(nodeName=="*"||c.nodeName.toUpperCase()==nodeName))r.push(c);ret=r;t=t.replace(re,"");if(t.indexOf(" ")==0)continue;foundToken=true;}else{re=/^([>+~])\s*(\w*)/i;if((m=re.exec(t))!=null){r=[];var merge={};nodeName=m[2].toUpperCase();m=m[1];for(var j=0,rl=ret.length;j=0;if(!not&&pass||not&&!pass)tmp.push(r[i]);}return tmp;},filter:function(t,r,not){var last;while(t&&t!=last){last=t;var p=jQuery.parse,m;for(var i=0;p[i];i++){m=p[i].exec(t);if(m){t=t.substring(m[0].length);m[2]=m[2].replace(/\\/g,"");break;}}if(!m)break;if(m[1]==":"&&m[2]=="not")r=isSimple.test(m[3])?jQuery.filter(m[3],r,true).r:jQuery(r).not(m[3]);else if(m[1]==".")r=jQuery.classFilter(r,m[2],not);else if(m[1]=="["){var tmp=[],type=m[3];for(var i=0,rl=r.length;i=0)^not)tmp.push(a);}r=tmp;}else if(m[1]==":"&&m[2]=="nth-child"){var merge={},tmp=[],test=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(m[3]=="even"&&"2n"||m[3]=="odd"&&"2n+1"||!/\D/.test(m[3])&&"0n+"+m[3]||m[3]),first=(test[1]+(test[2]||1))-0,last=test[3]-0;for(var 
i=0,rl=r.length;i=0)add=true;if(add^not)tmp.push(node);}r=tmp;}else{var fn=jQuery.expr[m[1]];if(typeof fn=="object")fn=fn[m[2]];if(typeof fn=="string")fn=eval("false||function(a,i){return "+fn+";}");r=jQuery.grep(r,function(elem,i){return fn(elem,i,m,r);},not);}}return{r:r,t:t};},dir:function(elem,dir){var matched=[],cur=elem[dir];while(cur&&cur!=document){if(cur.nodeType==1)matched.push(cur);cur=cur[dir];}return matched;},nth:function(cur,result,dir,elem){result=result||1;var num=0;for(;cur;cur=cur[dir])if(cur.nodeType==1&&++num==result)break;return cur;},sibling:function(n,elem){var r=[];for(;n;n=n.nextSibling){if(n.nodeType==1&&n!=elem)r.push(n);}return r;}});jQuery.event={add:function(elem,types,handler,data){if(elem.nodeType==3||elem.nodeType==8)return;if(jQuery.browser.msie&&elem.setInterval)elem=window;if(!handler.guid)handler.guid=this.guid++;if(data!=undefined){var fn=handler;handler=this.proxy(fn,function(){return fn.apply(this,arguments);});handler.data=data;}var events=jQuery.data(elem,"events")||jQuery.data(elem,"events",{}),handle=jQuery.data(elem,"handle")||jQuery.data(elem,"handle",function(){if(typeof jQuery!="undefined"&&!jQuery.event.triggered)return jQuery.event.handle.apply(arguments.callee.elem,arguments);});handle.elem=elem;jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];handler.type=parts[1];var handlers=events[type];if(!handlers){handlers=events[type]={};if(!jQuery.event.special[type]||jQuery.event.special[type].setup.call(elem)===false){if(elem.addEventListener)elem.addEventListener(type,handle,false);else if(elem.attachEvent)elem.attachEvent("on"+type,handle);}}handlers[handler.guid]=handler;jQuery.event.global[type]=true;});elem=null;},guid:1,global:{},remove:function(elem,types,handler){if(elem.nodeType==3||elem.nodeType==8)return;var events=jQuery.data(elem,"events"),ret,index;if(events){if(types==undefined||(typeof types=="string"&&types.charAt(0)=="."))for(var type in events)this.remove(elem,type+(types||""));else{if(types.type){handler=types.handler;types=types.type;}jQuery.each(types.split(/\s+/),function(index,type){var parts=type.split(".");type=parts[0];if(events[type]){if(handler)delete events[type][handler.guid];else for(handler in events[type])if(!parts[1]||events[type][handler].type==parts[1])delete events[type][handler];for(ret in events[type])break;if(!ret){if(!jQuery.event.special[type]||jQuery.event.special[type].teardown.call(elem)===false){if(elem.removeEventListener)elem.removeEventListener(type,jQuery.data(elem,"handle"),false);else if(elem.detachEvent)elem.detachEvent("on"+type,jQuery.data(elem,"handle"));}ret=null;delete events[type];}}});}for(ret in events)break;if(!ret){var handle=jQuery.data(elem,"handle");if(handle)handle.elem=null;jQuery.removeData(elem,"events");jQuery.removeData(elem,"handle");}}},trigger:function(type,data,elem,donative,extra){data=jQuery.makeArray(data);if(type.indexOf("!")>=0){type=type.slice(0,-1);var exclusive=true;}if(!elem){if(this.global[type])jQuery("*").add([window,document]).trigger(type,data);}else{if(elem.nodeType==3||elem.nodeType==8)return undefined;var val,ret,fn=jQuery.isFunction(elem[type]||null),event=!data[0]||!data[0].preventDefault;if(event){data.unshift({type:type,target:elem,preventDefault:function(){},stopPropagation:function(){},timeStamp:now()});data[0][expando]=true;}data[0].type=type;if(exclusive)data[0].exclusive=true;var 
handle=jQuery.data(elem,"handle");if(handle)val=handle.apply(elem,data);if((!fn||(jQuery.nodeName(elem,'a')&&type=="click"))&&elem["on"+type]&&elem["on"+type].apply(elem,data)===false)val=false;if(event)data.shift();if(extra&&jQuery.isFunction(extra)){ret=extra.apply(elem,val==null?data:data.concat(val));if(ret!==undefined)val=ret;}if(fn&&donative!==false&&val!==false&&!(jQuery.nodeName(elem,'a')&&type=="click")){this.triggered=true;try{elem[type]();}catch(e){}}this.triggered=false;}return val;},handle:function(event){var val,ret,namespace,all,handlers;event=arguments[0]=jQuery.event.fix(event||window.event);namespace=event.type.split(".");event.type=namespace[0];namespace=namespace[1];all=!namespace&&!event.exclusive;handlers=(jQuery.data(this,"events")||{})[event.type];for(var j in handlers){var handler=handlers[j];if(all||handler.type==namespace){event.handler=handler;event.data=handler.data;ret=handler.apply(this,arguments);if(val!==false)val=ret;if(ret===false){event.preventDefault();event.stopPropagation();}}}return val;},fix:function(event){if(event[expando]==true)return event;var originalEvent=event;event={originalEvent:originalEvent};var props="altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target timeStamp toElement type view wheelDelta which".split(" ");for(var i=props.length;i;i--)event[props[i]]=originalEvent[props[i]];event[expando]=true;event.preventDefault=function(){if(originalEvent.preventDefault)originalEvent.preventDefault();originalEvent.returnValue=false;};event.stopPropagation=function(){if(originalEvent.stopPropagation)originalEvent.stopPropagation();originalEvent.cancelBubble=true;};event.timeStamp=event.timeStamp||now();if(!event.target)event.target=event.srcElement||document;if(event.target.nodeType==3)event.target=event.target.parentNode;if(!event.relatedTarget&&event.fromElement)event.relatedTarget=event.fromElement==event.target?event.toElement:event.fromElement;if(event.pageX==null&&event.clientX!=null){var doc=document.documentElement,body=document.body;event.pageX=event.clientX+(doc&&doc.scrollLeft||body&&body.scrollLeft||0)-(doc.clientLeft||0);event.pageY=event.clientY+(doc&&doc.scrollTop||body&&body.scrollTop||0)-(doc.clientTop||0);}if(!event.which&&((event.charCode||event.charCode===0)?event.charCode:event.keyCode))event.which=event.charCode||event.keyCode;if(!event.metaKey&&event.ctrlKey)event.metaKey=event.ctrlKey;if(!event.which&&event.button)event.which=(event.button&1?1:(event.button&2?3:(event.button&4?2:0)));return event;},proxy:function(fn,proxy){proxy.guid=fn.guid=fn.guid||proxy.guid||this.guid++;return proxy;},special:{ready:{setup:function(){bindReady();return;},teardown:function(){return;}},mouseenter:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseover",jQuery.event.special.mouseenter.handler);return true;},teardown:function(){if(jQuery.browser.msie)return false;jQuery(this).unbind("mouseover",jQuery.event.special.mouseenter.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseenter";return jQuery.event.handle.apply(this,arguments);}},mouseleave:{setup:function(){if(jQuery.browser.msie)return false;jQuery(this).bind("mouseout",jQuery.event.special.mouseleave.handler);return true;},teardown:function(){if(jQuery.browser.msie)return 
false;jQuery(this).unbind("mouseout",jQuery.event.special.mouseleave.handler);return true;},handler:function(event){if(withinElement(event,this))return true;event.type="mouseleave";return jQuery.event.handle.apply(this,arguments);}}}};jQuery.fn.extend({bind:function(type,data,fn){return type=="unload"?this.one(type,data,fn):this.each(function(){jQuery.event.add(this,type,fn||data,fn&&data);});},one:function(type,data,fn){var one=jQuery.event.proxy(fn||data,function(event){jQuery(this).unbind(event,one);return(fn||data).apply(this,arguments);});return this.each(function(){jQuery.event.add(this,type,one,fn&&data);});},unbind:function(type,fn){return this.each(function(){jQuery.event.remove(this,type,fn);});},trigger:function(type,data,fn){return this.each(function(){jQuery.event.trigger(type,data,this,true,fn);});},triggerHandler:function(type,data,fn){return this[0]&&jQuery.event.trigger(type,data,this[0],false,fn);},toggle:function(fn){var args=arguments,i=1;while(i=0){var selector=url.slice(off,url.length);url=url.slice(0,off);}callback=callback||function(){};var type="GET";if(params)if(jQuery.isFunction(params)){callback=params;params=null;}else{params=jQuery.param(params);type="POST";}var self=this;jQuery.ajax({url:url,type:type,dataType:"html",data:params,complete:function(res,status){if(status=="success"||status=="notmodified")self.html(selector?jQuery("
").append(res.responseText.replace(//g,"")).find(selector):res.responseText);self.each(callback,[res.responseText,status,res]);}});return this;},serialize:function(){return jQuery.param(this.serializeArray());},serializeArray:function(){return this.map(function(){return jQuery.nodeName(this,"form")?jQuery.makeArray(this.elements):this;}).filter(function(){return this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password/i.test(this.type));}).map(function(i,elem){var val=jQuery(this).val();return val==null?null:val.constructor==Array?jQuery.map(val,function(val,i){return{name:elem.name,value:val};}):{name:elem.name,value:val};}).get();}});jQuery.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(i,o){jQuery.fn[o]=function(f){return this.bind(o,f);};});var jsc=now();jQuery.extend({get:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data=null;}return jQuery.ajax({type:"GET",url:url,data:data,success:callback,dataType:type});},getScript:function(url,callback){return jQuery.get(url,null,callback,"script");},getJSON:function(url,data,callback){return jQuery.get(url,data,callback,"json");},post:function(url,data,callback,type){if(jQuery.isFunction(data)){callback=data;data={};}return jQuery.ajax({type:"POST",url:url,data:data,success:callback,dataType:type});},ajaxSetup:function(settings){jQuery.extend(jQuery.ajaxSettings,settings);},ajaxSettings:{url:location.href,global:true,type:"GET",timeout:0,contentType:"application/x-www-form-urlencoded",processData:true,async:true,data:null,username:null,password:null,accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(s){s=jQuery.extend(true,s,jQuery.extend(true,{},jQuery.ajaxSettings,s));var jsonp,jsre=/=\?(&|$)/g,status,data,type=s.type.toUpperCase();if(s.data&&s.processData&&typeof s.data!="string")s.data=jQuery.param(s.data);if(s.dataType=="jsonp"){if(type=="GET"){if(!s.url.match(jsre))s.url+=(s.url.match(/\?/)?"&":"?")+(s.jsonp||"callback")+"=?";}else if(!s.data||!s.data.match(jsre))s.data=(s.data?s.data+"&":"")+(s.jsonp||"callback")+"=?";s.dataType="json";}if(s.dataType=="json"&&(s.data&&s.data.match(jsre)||s.url.match(jsre))){jsonp="jsonp"+jsc++;if(s.data)s.data=(s.data+"").replace(jsre,"="+jsonp+"$1");s.url=s.url.replace(jsre,"="+jsonp+"$1");s.dataType="script";window[jsonp]=function(tmp){data=tmp;success();complete();window[jsonp]=undefined;try{delete window[jsonp];}catch(e){}if(head)head.removeChild(script);};}if(s.dataType=="script"&&s.cache==null)s.cache=false;if(s.cache===false&&type=="GET"){var ts=now();var ret=s.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+ts+"$2");s.url=ret+((ret==s.url)?(s.url.match(/\?/)?"&":"?")+"_="+ts:"");}if(s.data&&type=="GET"){s.url+=(s.url.match(/\?/)?"&":"?")+s.data;s.data=null;}if(s.global&&!jQuery.active++)jQuery.event.trigger("ajaxStart");var remote=/^(?:\w+:)?\/\/([^\/?#]+)/;if(s.dataType=="script"&&type=="GET"&&remote.test(s.url)&&remote.exec(s.url)[1]!=location.host){var head=document.getElementsByTagName("head")[0];var script=document.createElement("script");script.src=s.url;if(s.scriptCharset)script.charset=s.scriptCharset;if(!jsonp){var 
done=false;script.onload=script.onreadystatechange=function(){if(!done&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){done=true;success();complete();head.removeChild(script);}};}head.appendChild(script);return undefined;}var requestDone=false;var xhr=window.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest();if(s.username)xhr.open(type,s.url,s.async,s.username,s.password);else xhr.open(type,s.url,s.async);try{if(s.data)xhr.setRequestHeader("Content-Type",s.contentType);if(s.ifModified)xhr.setRequestHeader("If-Modified-Since",jQuery.lastModified[s.url]||"Thu, 01 Jan 1970 00:00:00 GMT");xhr.setRequestHeader("X-Requested-With","XMLHttpRequest");xhr.setRequestHeader("Accept",s.dataType&&s.accepts[s.dataType]?s.accepts[s.dataType]+", */*":s.accepts._default);}catch(e){}if(s.beforeSend&&s.beforeSend(xhr,s)===false){s.global&&jQuery.active--;xhr.abort();return false;}if(s.global)jQuery.event.trigger("ajaxSend",[xhr,s]);var onreadystatechange=function(isTimeout){if(!requestDone&&xhr&&(xhr.readyState==4||isTimeout=="timeout")){requestDone=true;if(ival){clearInterval(ival);ival=null;}status=isTimeout=="timeout"&&"timeout"||!jQuery.httpSuccess(xhr)&&"error"||s.ifModified&&jQuery.httpNotModified(xhr,s.url)&&"notmodified"||"success";if(status=="success"){try{data=jQuery.httpData(xhr,s.dataType,s.dataFilter);}catch(e){status="parsererror";}}if(status=="success"){var modRes;try{modRes=xhr.getResponseHeader("Last-Modified");}catch(e){}if(s.ifModified&&modRes)jQuery.lastModified[s.url]=modRes;if(!jsonp)success();}else jQuery.handleError(s,xhr,status);complete();if(s.async)xhr=null;}};if(s.async){var ival=setInterval(onreadystatechange,13);if(s.timeout>0)setTimeout(function(){if(xhr){xhr.abort();if(!requestDone)onreadystatechange("timeout");}},s.timeout);}try{xhr.send(s.data);}catch(e){jQuery.handleError(s,xhr,null,e);}if(!s.async)onreadystatechange();function success(){if(s.success)s.success(data,status);if(s.global)jQuery.event.trigger("ajaxSuccess",[xhr,s]);}function complete(){if(s.complete)s.complete(xhr,status);if(s.global)jQuery.event.trigger("ajaxComplete",[xhr,s]);if(s.global&&!--jQuery.active)jQuery.event.trigger("ajaxStop");}return xhr;},handleError:function(s,xhr,status,e){if(s.error)s.error(xhr,status,e);if(s.global)jQuery.event.trigger("ajaxError",[xhr,s,e]);},active:0,httpSuccess:function(xhr){try{return!xhr.status&&location.protocol=="file:"||(xhr.status>=200&&xhr.status<300)||xhr.status==304||xhr.status==1223||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpNotModified:function(xhr,url){try{var xhrRes=xhr.getResponseHeader("Last-Modified");return xhr.status==304||xhrRes==jQuery.lastModified[url]||jQuery.browser.safari&&xhr.status==undefined;}catch(e){}return false;},httpData:function(xhr,type,filter){var ct=xhr.getResponseHeader("content-type"),xml=type=="xml"||!type&&ct&&ct.indexOf("xml")>=0,data=xml?xhr.responseXML:xhr.responseText;if(xml&&data.documentElement.tagName=="parsererror")throw"parsererror";if(filter)data=filter(data,type);if(type=="script")jQuery.globalEval(data);if(type=="json")data=eval("("+data+")");return data;},param:function(a){var s=[];if(a.constructor==Array||a.jquery)jQuery.each(a,function(){s.push(encodeURIComponent(this.name)+"="+encodeURIComponent(this.value));});else for(var j in a)if(a[j]&&a[j].constructor==Array)jQuery.each(a[j],function(){s.push(encodeURIComponent(j)+"="+encodeURIComponent(this));});else 
s.push(encodeURIComponent(j)+"="+encodeURIComponent(jQuery.isFunction(a[j])?a[j]():a[j]));return s.join("&").replace(/%20/g,"+");}});jQuery.fn.extend({show:function(speed,callback){return speed?this.animate({height:"show",width:"show",opacity:"show"},speed,callback):this.filter(":hidden").each(function(){this.style.display=this.oldblock||"";if(jQuery.css(this,"display")=="none"){var elem=jQuery("<"+this.tagName+" />").appendTo("body");this.style.display=elem.css("display");if(this.style.display=="none")this.style.display="block";elem.remove();}}).end();},hide:function(speed,callback){return speed?this.animate({height:"hide",width:"hide",opacity:"hide"},speed,callback):this.filter(":visible").each(function(){this.oldblock=this.oldblock||jQuery.css(this,"display");this.style.display="none";}).end();},_toggle:jQuery.fn.toggle,toggle:function(fn,fn2){return jQuery.isFunction(fn)&&jQuery.isFunction(fn2)?this._toggle.apply(this,arguments):fn?this.animate({height:"toggle",width:"toggle",opacity:"toggle"},fn,fn2):this.each(function(){jQuery(this)[jQuery(this).is(":hidden")?"show":"hide"]();});},slideDown:function(speed,callback){return this.animate({height:"show"},speed,callback);},slideUp:function(speed,callback){return this.animate({height:"hide"},speed,callback);},slideToggle:function(speed,callback){return this.animate({height:"toggle"},speed,callback);},fadeIn:function(speed,callback){return this.animate({opacity:"show"},speed,callback);},fadeOut:function(speed,callback){return this.animate({opacity:"hide"},speed,callback);},fadeTo:function(speed,to,callback){return this.animate({opacity:to},speed,callback);},animate:function(prop,speed,easing,callback){var optall=jQuery.speed(speed,easing,callback);return this[optall.queue===false?"each":"queue"](function(){if(this.nodeType!=1)return false;var opt=jQuery.extend({},optall),p,hidden=jQuery(this).is(":hidden"),self=this;for(p in prop){if(prop[p]=="hide"&&hidden||prop[p]=="show"&&!hidden)return opt.complete.call(this);if(p=="height"||p=="width"){opt.display=jQuery.css(this,"display");opt.overflow=this.style.overflow;}}if(opt.overflow!=null)this.style.overflow="hidden";opt.curAnim=jQuery.extend({},prop);jQuery.each(prop,function(name,val){var e=new jQuery.fx(self,opt,name);if(/toggle|show|hide/.test(val))e[val=="toggle"?hidden?"show":"hide":val](prop);else{var parts=val.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),start=e.cur(true)||0;if(parts){var end=parseFloat(parts[2]),unit=parts[3]||"px";if(unit!="px"){self.style[name]=(end||1)+unit;start=((end||1)/e.cur(true))*start;self.style[name]=start+unit;}if(parts[1])end=((parts[1]=="-="?-1:1)*end)+start;e.custom(start,end,unit);}else e.custom(start,val,"");}});return true;});},queue:function(type,fn){if(jQuery.isFunction(type)||(type&&type.constructor==Array)){fn=type;type="fx";}if(!type||(typeof type=="string"&&!fn))return queue(this[0],type);return this.each(function(){if(fn.constructor==Array)queue(this,type,fn);else{queue(this,type).push(fn);if(queue(this,type).length==1)fn.call(this);}});},stop:function(clearQueue,gotoEnd){var timers=jQuery.timers;if(clearQueue)this.queue([]);this.each(function(){for(var i=timers.length-1;i>=0;i--)if(timers[i].elem==this){if(gotoEnd)timers[i](true);timers.splice(i,1);}});if(!gotoEnd)this.dequeue();return this;}});var queue=function(elem,type,array){if(elem){type=type||"fx";var q=jQuery.data(elem,type+"queue");if(!q||array)q=jQuery.data(elem,type+"queue",jQuery.makeArray(array));}return q;};jQuery.fn.dequeue=function(type){type=type||"fx";return 
this.each(function(){var q=queue(this,type);q.shift();if(q.length)q[0].call(this);});};jQuery.extend({speed:function(speed,easing,fn){var opt=speed&&speed.constructor==Object?speed:{complete:fn||!fn&&easing||jQuery.isFunction(speed)&&speed,duration:speed,easing:fn&&easing||easing&&easing.constructor!=Function&&easing};opt.duration=(opt.duration&&opt.duration.constructor==Number?opt.duration:jQuery.fx.speeds[opt.duration])||jQuery.fx.speeds.def;opt.old=opt.complete;opt.complete=function(){if(opt.queue!==false)jQuery(this).dequeue();if(jQuery.isFunction(opt.old))opt.old.call(this);};return opt;},easing:{linear:function(p,n,firstNum,diff){return firstNum+diff*p;},swing:function(p,n,firstNum,diff){return((-Math.cos(p*Math.PI)/2)+0.5)*diff+firstNum;}},timers:[],timerId:null,fx:function(elem,options,prop){this.options=options;this.elem=elem;this.prop=prop;if(!options.orig)options.orig={};}});jQuery.fx.prototype={update:function(){if(this.options.step)this.options.step.call(this.elem,this.now,this);(jQuery.fx.step[this.prop]||jQuery.fx.step._default)(this);if(this.prop=="height"||this.prop=="width")this.elem.style.display="block";},cur:function(force){if(this.elem[this.prop]!=null&&this.elem.style[this.prop]==null)return this.elem[this.prop];var r=parseFloat(jQuery.css(this.elem,this.prop,force));return r&&r>-10000?r:parseFloat(jQuery.curCSS(this.elem,this.prop))||0;},custom:function(from,to,unit){this.startTime=now();this.start=from;this.end=to;this.unit=unit||this.unit||"px";this.now=this.start;this.pos=this.state=0;this.update();var self=this;function t(gotoEnd){return self.step(gotoEnd);}t.elem=this.elem;jQuery.timers.push(t);if(jQuery.timerId==null){jQuery.timerId=setInterval(function(){var timers=jQuery.timers;for(var i=0;ithis.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;var done=true;for(var i in this.options.curAnim)if(this.options.curAnim[i]!==true)done=false;if(done){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;this.elem.style.display=this.options.display;if(jQuery.css(this.elem,"display")=="none")this.elem.style.display="block";}if(this.options.hide)this.elem.style.display="none";if(this.options.hide||this.options.show)for(var p in this.options.curAnim)jQuery.attr(this.elem.style,p,this.options.orig[p]);}if(done)this.options.complete.call(this.elem);return false;}else{var n=t-this.startTime;this.state=n/this.options.duration;this.pos=jQuery.easing[this.options.easing||(jQuery.easing.swing?"swing":"linear")](this.state,n,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update();}return true;}};jQuery.extend(jQuery.fx,{speeds:{slow:600,fast:200,def:400},step:{scrollLeft:function(fx){fx.elem.scrollLeft=fx.now;},scrollTop:function(fx){fx.elem.scrollTop=fx.now;},opacity:function(fx){jQuery.attr(fx.elem.style,"opacity",fx.now);},_default:function(fx){fx.elem.style[fx.prop]=fx.now+fx.unit;}}});jQuery.fn.offset=function(){var left=0,top=0,elem=this[0],results;if(elem)with(jQuery.browser){var parent=elem.parentNode,offsetChild=elem,offsetParent=elem.offsetParent,doc=elem.ownerDocument,safari2=safari&&parseInt(version)<522&&!/adobeair/i.test(userAgent),css=jQuery.curCSS,fixed=css(elem,"position")=="fixed";if(elem.getBoundingClientRect){var 
box=elem.getBoundingClientRect();add(box.left+Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),box.top+Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));add(-doc.documentElement.clientLeft,-doc.documentElement.clientTop);}else{add(elem.offsetLeft,elem.offsetTop);while(offsetParent){add(offsetParent.offsetLeft,offsetParent.offsetTop);if(mozilla&&!/^t(able|d|h)$/i.test(offsetParent.tagName)||safari&&!safari2)border(offsetParent);if(!fixed&&css(offsetParent,"position")=="fixed")fixed=true;offsetChild=/^body$/i.test(offsetParent.tagName)?offsetChild:offsetParent;offsetParent=offsetParent.offsetParent;}while(parent&&parent.tagName&&!/^body|html$/i.test(parent.tagName)){if(!/^inline|table.*$/i.test(css(parent,"display")))add(-parent.scrollLeft,-parent.scrollTop);if(mozilla&&css(parent,"overflow")!="visible")border(parent);parent=parent.parentNode;}if((safari2&&(fixed||css(offsetChild,"position")=="absolute"))||(mozilla&&css(offsetChild,"position")!="absolute"))add(-doc.body.offsetLeft,-doc.body.offsetTop);if(fixed)add(Math.max(doc.documentElement.scrollLeft,doc.body.scrollLeft),Math.max(doc.documentElement.scrollTop,doc.body.scrollTop));}results={top:top,left:left};}function border(elem){add(jQuery.curCSS(elem,"borderLeftWidth",true),jQuery.curCSS(elem,"borderTopWidth",true));}function add(l,t){left+=parseInt(l,10)||0;top+=parseInt(t,10)||0;}return results;};jQuery.fn.extend({position:function(){var left=0,top=0,results;if(this[0]){var offsetParent=this.offsetParent(),offset=this.offset(),parentOffset=/^body|html$/i.test(offsetParent[0].tagName)?{top:0,left:0}:offsetParent.offset();offset.top-=num(this,'marginTop');offset.left-=num(this,'marginLeft');parentOffset.top+=num(offsetParent,'borderTopWidth');parentOffset.left+=num(offsetParent,'borderLeftWidth');results={top:offset.top-parentOffset.top,left:offset.left-parentOffset.left};}return results;},offsetParent:function(){var offsetParent=this[0].offsetParent;while(offsetParent&&(!/^body|html$/i.test(offsetParent.tagName)&&jQuery.css(offsetParent,'position')=='static'))offsetParent=offsetParent.offsetParent;return jQuery(offsetParent);}});jQuery.each(['Left','Top'],function(i,name){var method='scroll'+name;jQuery.fn[method]=function(val){if(!this[0])return;return val!=undefined?this.each(function(){this==window||this==document?window.scrollTo(!i?val:jQuery(window).scrollLeft(),i?val:jQuery(window).scrollTop()):this[method]=val;}):this[0]==window||this[0]==document?self[i?'pageYOffset':'pageXOffset']||jQuery.boxModel&&document.documentElement[method]||document.body[method]:this[0][method];};});jQuery.each(["Height","Width"],function(i,name){var tl=i?"Left":"Top",br=i?"Right":"Bottom";jQuery.fn["inner"+name]=function(){return this[name.toLowerCase()]()+num(this,"padding"+tl)+num(this,"padding"+br);};jQuery.fn["outer"+name]=function(margin){return this["inner"+name]()+num(this,"border"+tl+"Width")+num(this,"border"+br+"Width")+(margin?num(this,"margin"+tl)+num(this,"margin"+br):0);};});})();prophet-0.750/share/web/static/jquery/js/jquery-autocomplete.js000066400000000000000000000466561160607302300247020ustar00rootroot00000000000000/* * Autocomplete - jQuery plugin 1.0.2 * * Copyright (c) 2007 Dylan Verheul, Dan G. 
Switzer, Anjesh Tuladhar, Jörn Zaefferer * * Dual licensed under the MIT and GPL licenses: * http://www.opensource.org/licenses/mit-license.php * http://www.gnu.org/licenses/gpl.html * * Revision: $Id: jquery.autocomplete.js 5747 2008-06-25 18:30:55Z joern.zaefferer $ * */ ;(function($) { $.fn.extend({ autocomplete: function(urlOrData, options) { var isUrl = typeof urlOrData == "string"; options = $.extend({}, $.Autocompleter.defaults, { url: isUrl ? urlOrData : null, data: isUrl ? null : urlOrData, delay: isUrl ? $.Autocompleter.defaults.delay : 10, max: options && !options.scroll ? 10 : 150 }, options); // if highlight is set to false, replace it with a do-nothing function options.highlight = options.highlight || function(value) { return value; }; // if the formatMatch option is not specified, then use formatItem for backwards compatibility options.formatMatch = options.formatMatch || options.formatItem; return this.each(function() { new $.Autocompleter(this, options); }); }, result: function(handler) { return this.bind("result", handler); }, search: function(handler) { return this.trigger("search", [handler]); }, flushCache: function() { return this.trigger("flushCache"); }, setOptions: function(options){ return this.trigger("setOptions", [options]); }, unautocomplete: function() { return this.trigger("unautocomplete"); } }); $.Autocompleter = function(input, options) { var KEY = { UP: 38, DOWN: 40, DEL: 46, TAB: 9, RETURN: 13, ESC: 27, COMMA: 188, PAGEUP: 33, PAGEDOWN: 34, BACKSPACE: 8 }; // Create $ object for input element var $input = $(input).attr("autocomplete", "off").addClass(options.inputClass); var timeout; var previousValue = ""; var cache = $.Autocompleter.Cache(options); var hasFocus = 0; var lastKeyPressCode; var config = { mouseDownOnSelect: false }; var select = $.Autocompleter.Select(options, input, selectCurrent, config); var blockSubmit; // prevent form submit in opera when selecting with return key $.browser.opera && $(input.form).bind("submit.autocomplete", function() { if (blockSubmit) { blockSubmit = false; return false; } }); // only opera doesn't trigger keydown multiple times while pressed, others don't work with keypress at all $input.bind(($.browser.opera ? 
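/* Usage sketch for the public API set up above (names are illustrative; the first argument may
   be a local data array or a URL string, in which case the plugin issues GET requests carrying
   q/limit parameters, as implemented in request() further down):

       var languages = ['Perl', 'Python', 'Ruby', 'JavaScript'];
       $('input#search').autocomplete(languages, { minChars: 1, max: 10, matchContains: true })
           .result(function(event, data, value) {
               // data is the selected row, value its string value
           });
*/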
"keypress" : "keydown") + ".autocomplete", function(event) { // track last key pressed lastKeyPressCode = event.keyCode; switch(event.keyCode) { case KEY.UP: event.preventDefault(); if ( select.visible() ) { select.prev(); } else { onChange(0, true); } break; case KEY.DOWN: event.preventDefault(); if ( select.visible() ) { select.next(); } else { onChange(0, true); } break; case KEY.PAGEUP: event.preventDefault(); if ( select.visible() ) { select.pageUp(); } else { onChange(0, true); } break; case KEY.PAGEDOWN: event.preventDefault(); if ( select.visible() ) { select.pageDown(); } else { onChange(0, true); } break; // matches also semicolon case options.multiple && $.trim(options.multipleSeparator) == "," && KEY.COMMA: case KEY.TAB: case KEY.RETURN: if( selectCurrent() ) { // stop default to prevent a form submit, Opera needs special handling event.preventDefault(); blockSubmit = true; return false; } break; case KEY.ESC: select.hide(); break; default: clearTimeout(timeout); timeout = setTimeout(onChange, options.delay); break; } }).focus(function(){ // track whether the field has focus, we shouldn't process any // results if the field no longer has focus if ( ++hasFocus > 0 && !select.visible() ) { onChange(0, false); } }).blur(function() { hasFocus = 0; if (!config.mouseDownOnSelect) { hideResults(); } }).click(function() { // show select when clicking in a focused field if ( hasFocus++ > 1 && !select.visible() ) { onChange(0, true); } }).bind("search", function() { // TODO why not just specifying both arguments? var fn = (arguments.length > 1) ? arguments[1] : null; function findValueCallback(q, data) { var result; if( data && data.length ) { for (var i=0; i < data.length; i++) { if( data[i].result.toLowerCase() == q.toLowerCase() ) { result = data[i]; break; } } } if( typeof fn == "function" ) fn(result); else $input.trigger("result", result && [result.data, result.value]); } $.each(trimWords($input.val()), function(i, value) { request(value, findValueCallback, findValueCallback); }); }).bind("flushCache", function() { cache.flush(); }).bind("setOptions", function() { $.extend(options, arguments[1]); // if we've updated the data, repopulate if ( "data" in arguments[1] ) cache.populate(); }).bind("unautocomplete", function() { select.unbind(); $input.unbind(); $(input.form).unbind(".autocomplete"); }); function selectCurrent() { var selected = select.selected(); if( !selected ) return false; var v = selected.result; previousValue = v; if ( options.multiple ) { var words = trimWords($input.val()); if ( words.length > 1 ) { v = words.slice(0, words.length - 1).join( options.multipleSeparator ) + options.multipleSeparator + v; } v += options.multipleSeparator; } $input.val(v); hideResultsNow(); $input.trigger("result", [selected.data, selected.value]); return true; } function onChange(crap, skipPrevCheck) { if( lastKeyPressCode == KEY.DEL ) { select.hide(); return; } var currentValue = $input.val(); if ( !skipPrevCheck && currentValue == previousValue ) return; previousValue = currentValue; currentValue = lastWord(currentValue); if ( currentValue.length >= options.minChars) { $input.addClass(options.loadingClass); if (!options.matchCase) currentValue = currentValue.toLowerCase(); request(currentValue, receiveData, hideResultsNow); } else { stopLoading(); select.hide(); } }; function trimWords(value) { if ( !value ) { return [""]; } var words = value.split( options.multipleSeparator ); var result = []; $.each(words, function(i, value) { if ( $.trim(value) ) result[i] = $.trim(value); }); 
return result; } function lastWord(value) { if ( !options.multiple ) return value; var words = trimWords(value); return words[words.length - 1]; } // fills in the input box w/the first match (assumed to be the best match) // q: the term entered // sValue: the first matching result function autoFill(q, sValue){ // autofill in the complete box w/the first match as long as the user hasn't entered in more data // if the last user key pressed was backspace, don't autofill if( options.autoFill && (lastWord($input.val()).toLowerCase() == q.toLowerCase()) && lastKeyPressCode != KEY.BACKSPACE ) { // fill in the value (keep the case the user has typed) $input.val($input.val() + sValue.substring(lastWord(previousValue).length)); // select the portion of the value not typed by the user (so the next character will erase) $.Autocompleter.Selection(input, previousValue.length, previousValue.length + sValue.length); } }; function hideResults() { clearTimeout(timeout); timeout = setTimeout(hideResultsNow, 200); }; function hideResultsNow() { var wasVisible = select.visible(); select.hide(); clearTimeout(timeout); stopLoading(); if (options.mustMatch) { // call search and run callback $input.search( function (result){ // if no value found, clear the input box if( !result ) { if (options.multiple) { var words = trimWords($input.val()).slice(0, -1); $input.val( words.join(options.multipleSeparator) + (words.length ? options.multipleSeparator : "") ); } else $input.val( "" ); } } ); } if (wasVisible) // position cursor at end of input field $.Autocompleter.Selection(input, input.value.length, input.value.length); }; function receiveData(q, data) { if ( data && data.length && hasFocus ) { stopLoading(); select.display(data, q); autoFill(q, data[0].value); select.show(); } else { hideResultsNow(); } }; function request(term, success, failure) { if (!options.matchCase) term = term.toLowerCase(); var data = cache.load(term); // recieve the cached data if (data && data.length) { success(term, data); // if an AJAX url has been supplied, try loading the data now } else if( (typeof options.url == "string") && (options.url.length > 0) ){ var extraParams = { timestamp: +new Date() }; $.each(options.extraParams, function(key, param) { extraParams[key] = typeof param == "function" ? 
param() : param; }); $.ajax({ // try to leverage ajaxQueue plugin to abort previous requests mode: "abort", // limit abortion to this input port: "autocomplete" + input.name, dataType: options.dataType, url: options.url, data: $.extend({ q: lastWord(term), limit: options.max }, extraParams), success: function(data) { var parsed = options.parse && options.parse(data) || parse(data); cache.add(term, parsed); success(term, parsed); } }); } else { // if we have a failure, we need to empty the list -- this prevents the the [TAB] key from selecting the last successful match select.emptyList(); failure(term); } }; function parse(data) { var parsed = []; var rows = data.split("\n"); for (var i=0; i < rows.length; i++) { var row = $.trim(rows[i]); if (row) { row = row.split("|"); parsed[parsed.length] = { data: row, value: row[0], result: options.formatResult && options.formatResult(row, row[0]) || row[0] }; } } return parsed; }; function stopLoading() { $input.removeClass(options.loadingClass); }; }; $.Autocompleter.defaults = { inputClass: "ac_input", resultsClass: "ac_results", loadingClass: "ac_loading", minChars: 1, delay: 400, matchCase: false, matchSubset: true, matchContains: false, cacheLength: 10, max: 100, mustMatch: false, extraParams: {}, selectFirst: true, formatItem: function(row) { return row[0]; }, formatMatch: null, autoFill: false, width: 0, multiple: false, multipleSeparator: ", ", highlight: function(value, term) { return value.replace(new RegExp("(?![^&;]+;)(?!<[^<>]*)(" + term.replace(/([\^\$\(\)\[\]\{\}\*\.\+\?\|\\])/gi, "\\$1") + ")(?![^<>]*>)(?![^&;]+;)", "gi"), "$1"); }, scroll: true, scrollHeight: 180 }; $.Autocompleter.Cache = function(options) { var data = {}; var length = 0; function matchSubset(s, sub) { if (!options.matchCase) s = s.toLowerCase(); var i = s.indexOf(sub); if (i == -1) return false; return i == 0 || options.matchContains; }; function add(q, value) { if (length > options.cacheLength){ flush(); } if (!data[q]){ length++; } data[q] = value; } function populate(){ if( !options.data ) return false; // track the matches var stMatchSets = {}, nullData = 0; // no url was specified, we need to adjust the cache length to make sure it fits the local data store if( !options.url ) options.cacheLength = 1; // track all options for minChars = 0 stMatchSets[""] = []; // loop through the array and create a lookup structure for ( var i = 0, ol = options.data.length; i < ol; i++ ) { var rawValue = options.data[i]; // if rawValue is a string, make an array otherwise just reference the array rawValue = (typeof rawValue == "string") ? 
[rawValue] : rawValue; var value = options.formatMatch(rawValue, i+1, options.data.length); if ( value === false ) continue; var firstChar = value.charAt(0).toLowerCase(); // if no lookup array for this character exists, look it up now if( !stMatchSets[firstChar] ) stMatchSets[firstChar] = []; // if the match is a string var row = { value: value, data: rawValue, result: options.formatResult && options.formatResult(rawValue) || value }; // push the current match into the set list stMatchSets[firstChar].push(row); // keep track of minChars zero items if ( nullData++ < options.max ) { stMatchSets[""].push(row); } }; // add the data items to the cache $.each(stMatchSets, function(i, value) { // increase the cache size options.cacheLength++; // add to the cache add(i, value); }); } // populate any existing data setTimeout(populate, 25); function flush(){ data = {}; length = 0; } return { flush: flush, add: add, populate: populate, load: function(q) { if (!options.cacheLength || !length) return null; /* * if dealing w/local data and matchContains than we must make sure * to loop through all the data collections looking for matches */ if( !options.url && options.matchContains ){ // track all matches var csub = []; // loop through all the data grids for matches for( var k in data ){ // don't search through the stMatchSets[""] (minChars: 0) cache // this prevents duplicates if( k.length > 0 ){ var c = data[k]; $.each(c, function(i, x) { // if we've got a match, add it to the array if (matchSubset(x.value, q)) { csub.push(x); } }); } } return csub; } else // if the exact item exists, use it if (data[q]){ return data[q]; } else if (options.matchSubset) { for (var i = q.length - 1; i >= options.minChars; i--) { var c = data[q.substr(0, i)]; if (c) { var csub = []; $.each(c, function(i, x) { if (matchSubset(x.value, q)) { csub[csub.length] = x; } }); return csub; } } } return null; } }; }; $.Autocompleter.Select = function (options, input, select, config) { var CLASSES = { ACTIVE: "ac_over" }; var listItems, active = -1, data, term = "", needsInit = true, element, list; // Create results function init() { if (!needsInit) return; element = $("
") .hide() .addClass(options.resultsClass) .css("position", "absolute") .appendTo(document.body); list = $("
    ").appendTo(element).mouseover( function(event) { if(target(event).nodeName && target(event).nodeName.toUpperCase() == 'LI') { active = $("li", list).removeClass(CLASSES.ACTIVE).index(target(event)); $(target(event)).addClass(CLASSES.ACTIVE); } }).click(function(event) { $(target(event)).addClass(CLASSES.ACTIVE); select(); // TODO provide option to avoid setting focus again after selection? useful for cleanup-on-focus input.focus(); return false; }).mousedown(function() { config.mouseDownOnSelect = true; }).mouseup(function() { config.mouseDownOnSelect = false; }); if( options.width > 0 ) element.css("width", options.width); needsInit = false; } function target(event) { var element = event.target; while(element && element.tagName != "LI") element = element.parentNode; // more fun with IE, sometimes event.target is empty, just ignore it then if(!element) return []; return element; } function moveSelect(step) { listItems.slice(active, active + 1).removeClass(CLASSES.ACTIVE); movePosition(step); var activeItem = listItems.slice(active, active + 1).addClass(CLASSES.ACTIVE); if(options.scroll) { var offset = 0; listItems.slice(0, active).each(function() { offset += this.offsetHeight; }); if((offset + activeItem[0].offsetHeight - list.scrollTop()) > list[0].clientHeight) { list.scrollTop(offset + activeItem[0].offsetHeight - list.innerHeight()); } else if(offset < list.scrollTop()) { list.scrollTop(offset); } } }; function movePosition(step) { active += step; if (active < 0) { active = listItems.size() - 1; } else if (active >= listItems.size()) { active = 0; } } function limitNumberOfItems(available) { return options.max && options.max < available ? options.max : available; } function fillList() { list.empty(); var max = limitNumberOfItems(data.length); for (var i=0; i < max; i++) { if (!data[i]) continue; var formatted = options.formatItem(data[i].data, i+1, max, data[i].value, term); if ( formatted === false ) continue; var li = $("
  • ").html( options.highlight(formatted, term) ).addClass(i%2 == 0 ? "ac_even" : "ac_odd").appendTo(list)[0]; $.data(li, "ac_data", data[i]); } listItems = list.find("li"); if ( options.selectFirst ) { listItems.slice(0, 1).addClass(CLASSES.ACTIVE); active = 0; } // apply bgiframe if available if ( $.fn.bgiframe ) list.bgiframe(); } return { display: function(d, q) { init(); data = d; term = q; fillList(); }, next: function() { moveSelect(1); }, prev: function() { moveSelect(-1); }, pageUp: function() { if (active != 0 && active - 8 < 0) { moveSelect( -active ); } else { moveSelect(-8); } }, pageDown: function() { if (active != listItems.size() - 1 && active + 8 > listItems.size()) { moveSelect( listItems.size() - 1 - active ); } else { moveSelect(8); } }, hide: function() { element && element.hide(); listItems && listItems.removeClass(CLASSES.ACTIVE); active = -1; }, visible : function() { return element && element.is(":visible"); }, current: function() { return this.visible() && (listItems.filter("." + CLASSES.ACTIVE)[0] || options.selectFirst && listItems[0]); }, show: function() { var offset = $(input).offset(); element.css({ width: typeof options.width == "string" || options.width > 0 ? options.width : $(input).width(), top: offset.top + input.offsetHeight, left: offset.left }).show(); if(options.scroll) { list.scrollTop(0); list.css({ maxHeight: options.scrollHeight, overflow: 'auto' }); if($.browser.msie && typeof document.body.style.maxHeight === "undefined") { var listHeight = 0; listItems.each(function() { listHeight += this.offsetHeight; }); var scrollbarsVisible = listHeight > options.scrollHeight; list.css('height', scrollbarsVisible ? options.scrollHeight : listHeight ); if (!scrollbarsVisible) { // IE doesn't recalculate width when scrollbar disappears listItems.width( list.width() - parseInt(listItems.css("padding-left")) - parseInt(listItems.css("padding-right")) ); } } } }, selected: function() { var selected = listItems && listItems.filter("." + CLASSES.ACTIVE).removeClass(CLASSES.ACTIVE); return selected && selected.length && $.data(selected[0], "ac_data"); }, emptyList: function (){ list && list.empty(); }, unbind: function() { element && element.remove(); } }; }; $.Autocompleter.Selection = function(field, start, end) { if( field.createTextRange ){ var selRange = field.createTextRange(); selRange.collapse(true); selRange.moveStart("character", start); selRange.moveEnd("character", end); selRange.select(); } else if( field.setSelectionRange ){ field.setSelectionRange(start, end); } else { if( field.selectionStart ){ field.selectionStart = start; field.selectionEnd = end; } } field.focus(); }; })(jQuery); prophet-0.750/share/web/static/jquery/js/jquery.bgiframe.min.js000066400000000000000000000027551160607302300245300ustar00rootroot00000000000000/* Copyright (c) 2006 Brandon Aaron (http://brandonaaron.net) * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses. * * $LastChangedDate: 2007-06-19 20:25:28 -0500 (Tue, 19 Jun 2007) $ * $Rev: 2111 $ * * Version 2.1 */ (function($){$.fn.bgIframe=$.fn.bgiframe=function(s){if($.browser.msie&&parseInt($.browser.version)<=6){s=$.extend({top:'auto',left:'auto',width:'auto',height:'auto',opacity:true,src:'javascript:false;'},s||{});var prop=function(n){return n&&n.constructor==Number?n+'px':n;},html='