Merge pull request #506 from metamaps/feature/import.export

minimal import/export - could go in v2.8 for the keeners
This commit is contained in:
Connor Turland 2016-03-26 23:46:28 -07:00
commit e4145ef9fb
11 changed files with 438 additions and 51 deletions

View file

@ -25,6 +25,7 @@
//= require ./src/views/room
//= require ./src/JIT
//= require ./src/Metamaps
//= require ./src/Metamaps.Import
//= require ./src/Metamaps.JIT
//= require_directory ./shims
//= require_directory ./require

View file

@ -0,0 +1,303 @@
/*
* Example tab-separated input:
* Some fields will be ignored
*
* Topics
* Id Name Metacode X Y Description Link User Permission
* 8 topic8 Action -231 131 admin commons
* 5 topic Action -229 -131 admin commons
* 7 topic7.1 Action -470 -55 hey admin commons
* 2 topic2 Event -57 -63 admin commons
* 1 topic1 Catalyst -51 50 admin commons
* 6 topic6 Action -425 63 admin commons
*
* Synapses
* Topic1 Topic2 Category Description User Permission
* 6 2 from-to admin commons
* 6 1 from-to admin commons
* 6 5 from-to admin commons
* 2 7 from-to admin commons
* 8 6 from-to admin commons
* 8 1 from-to admin commons
*
*/
Metamaps.Import = {
  // note that user is not imported
  // FIX: 'desc' added — headers are normalized description -> desc below,
  // so without it topic descriptions were silently dropped by the whitelist
  topicWhitelist: [
    'id', 'name', 'metacode', 'x', 'y', 'desc', 'description', 'link', 'permission'
  ],
  synapseWhitelist: [
    'topic1', 'topic2', 'category', 'desc', 'description', 'permission'
  ],
  cidMappings: {}, // to be filled by import_id => cid mappings

  // Bind a paste handler on the page body. The clipboard may carry either
  // a JSON export (starts with '{') or tab-separated topic/synapse tables;
  // after user confirmation the parsed data is imported onto the map.
  init: function() {
    var self = Metamaps.Import;
    $('body').bind('paste', function(e) {
      var text = e.originalEvent.clipboardData.getData('text/plain');
      var results;
      if (text[0] === '{') {
        try {
          results = JSON.parse(text);
        } catch (err) { // FIX: `catch (Error e)` is invalid JavaScript syntax
          results = false;
        }
      } else {
        results = self.parseTabbedString(text);
      }
      if (results === false) return;

      var topics = results.topics;
      var synapses = results.synapses;
      if (topics.length > 0 || synapses.length > 0) {
        if (confirm("Are you sure you want to create " + topics.length +
            " new topics and " + synapses.length + " new synapses?")) {
          self.importTopics(topics);
          self.importSynapses(synapses);
        }
      }
    });
  },

  // Surface a fatal import problem to the user and log it for debugging.
  abort: function(message) {
    alert("Sorry, something went wrong!\n\n" + message);
    console.error(message);
  },

  // Trim surrounding whitespace and lowercase — used to compare section
  // keywords ("Topics" / "Synapses") case- and whitespace-insensitively.
  simplify: function(string) {
    return string
      .replace(/(^\s*|\s*$)/g, '')
      .toLowerCase();
  },

  // Parse tab-separated text (see example at the top of this file) into
  // { topics: [...], synapses: [...] }; returns false if parsing aborted.
  parseTabbedString: function(text) {
    var self = Metamaps.Import;

    // determine line ending and split lines
    var delim = "\n";
    if (text.indexOf("\r\n") !== -1) {
      delim = "\r\n";
    } else if (text.indexOf("\r") !== -1) {
      delim = "\r";
    }

    var STATES = {
      ABORT: -1,
      UNKNOWN: 0,
      TOPICS_NEED_HEADERS: 1,
      SYNAPSES_NEED_HEADERS: 2,
      TOPICS: 3,
      SYNAPSES: 4
    };

    // state & lines determine parser behaviour
    var state = STATES.UNKNOWN;
    var lines = text.split(delim);
    var results = { topics: [], synapses: [] };
    var topicHeaders = [];
    var synapseHeaders = [];

    lines.forEach(function(line_raw, index) {
      var line = line_raw.split("\t");
      var noblanks = line.filter(function(elt) {
        return elt !== "";
      });
      switch (state) {
        case STATES.UNKNOWN:
          if (noblanks.length === 0) {
            state = STATES.UNKNOWN;
            break;
          } else if (noblanks.length === 1 && self.simplify(line[0]) === 'topics') {
            state = STATES.TOPICS_NEED_HEADERS;
            break;
          } else if (noblanks.length === 1 && self.simplify(line[0]) === 'synapses') {
            state = STATES.SYNAPSES_NEED_HEADERS;
            break;
          }
          state = STATES.TOPICS_NEED_HEADERS;
          // FALL THROUGH - if we're not sure what to do, pretend
          // we're on the TOPICS_NEED_HEADERS state and parse some headers
        case STATES.TOPICS_NEED_HEADERS:
          if (noblanks.length < 2) {
            self.abort("Not enough topic headers on line " + index);
            state = STATES.ABORT;
            break; // FIX: without this break the header parse below ran anyway
                   // and clobbered the ABORT state with TOPICS
          }
          topicHeaders = line.map(function(header) {
            return header.toLowerCase().replace('description', 'desc');
          });
          state = STATES.TOPICS;
          break;
        case STATES.SYNAPSES_NEED_HEADERS:
          if (noblanks.length < 2) {
            self.abort("Not enough synapse headers on line " + index);
            state = STATES.ABORT;
            break; // FIX: same fall-through bug as TOPICS_NEED_HEADERS
          }
          synapseHeaders = line.map(function(header) {
            return header.toLowerCase().replace('description', 'desc');
          });
          state = STATES.SYNAPSES;
          break;
        case STATES.TOPICS:
          if (noblanks.length === 0) {
            state = STATES.UNKNOWN;
          } else if (noblanks.length === 1 && line[0].toLowerCase() === 'topics') {
            state = STATES.TOPICS_NEED_HEADERS;
          } else if (noblanks.length === 1 && line[0].toLowerCase() === 'synapses') {
            state = STATES.SYNAPSES_NEED_HEADERS;
          } else {
            var topic = {};
            line.forEach(function(field, fieldIndex) {
              var header = topicHeaders[fieldIndex];
              if (self.topicWhitelist.indexOf(header) === -1) return;
              topic[header] = field;
              if (['id', 'x', 'y'].indexOf(header) !== -1) {
                topic[header] = parseInt(topic[header], 10); // FIX: explicit radix
              }
            });
            results.topics.push(topic);
          }
          break;
        case STATES.SYNAPSES:
          if (noblanks.length === 0) {
            state = STATES.UNKNOWN;
          } else if (noblanks.length === 1 && line[0].toLowerCase() === 'topics') {
            state = STATES.TOPICS_NEED_HEADERS;
          } else if (noblanks.length === 1 && line[0].toLowerCase() === 'synapses') {
            state = STATES.SYNAPSES_NEED_HEADERS;
          } else {
            var synapse = {};
            line.forEach(function(field, fieldIndex) {
              var header = synapseHeaders[fieldIndex];
              if (self.synapseWhitelist.indexOf(header) === -1) return;
              synapse[header] = field;
              if (['id', 'topic1', 'topic2'].indexOf(header) !== -1) {
                synapse[header] = parseInt(synapse[header], 10); // FIX: explicit radix
              }
            });
            results.synapses.push(synapse);
          }
          break;
        case STATES.ABORT:
          break; // FIX: previously fell through to default, re-alerting the
                 // "Invalid state" message for every remaining line
        default:
          self.abort("Invalid state while parsing import data. Check code.");
          state = STATES.ABORT;
      }
    });

    if (state === STATES.ABORT) {
      return false;
    } else {
      return results;
    }
  },

  // Create all parsed topics. Topics without usable coordinates are placed
  // on an auto-generated grid whose extent grows with the topic count.
  importTopics: function(parsedTopics) {
    var self = Metamaps.Import;
    // up to 25 topics: scale 100
    // up to 81 topics: scale 200
    // up to 169 topics: scale 300
    var scale = Math.floor((Math.sqrt(parsedTopics.length) - 1) / 4) * 100;
    if (scale < 100) scale = 100;
    var autoX = -scale;
    var autoY = -scale;
    parsedTopics.forEach(function(topic) {
      var x, y;
      // FIX: `topic.x && topic.y` treated a legitimate 0 coordinate as
      // missing (0 is falsy); NaN/undefined still route to the auto-grid
      if (!isNaN(topic.x) && !isNaN(topic.y)) {
        x = topic.x;
        y = topic.y;
      } else {
        x = autoX;
        y = autoY;
        autoX += 50;
        if (autoX > scale) {
          autoY += 50;
          autoX = -scale;
        }
      }
      self.createTopicWithParameters(
        topic.name, topic.metacode, topic.permission,
        topic.desc, topic.link, x, y, topic.id
      );
    });
  },

  // Create all parsed synapses; topic1/topic2 are import ids resolved via
  // cidMappings to the client-side topics created by importTopics.
  importSynapses: function(parsedSynapses) {
    var self = Metamaps.Import;
    parsedSynapses.forEach(function(synapse) {
      self.createSynapseWithParameters(
        synapse.desc, synapse.category, synapse.permission,
        synapse.topic1, synapse.topic2
      );
    });
  },

  // Build a Topic model + Mapping and render it; records import_id => cid
  // so synapses imported afterwards can reference this topic.
  createTopicWithParameters: function(name, metacode_name, permission, desc,
                                      link, xloc, yloc, import_id) {
    var self = Metamaps.Import;
    $(document).trigger(Metamaps.Map.events.editedByActiveMapper);
    var metacode = Metamaps.Metacodes.where({name: metacode_name})[0] || null;
    if (metacode === null) return console.error("metacode not found");

    var topic = new Metamaps.Backbone.Topic({
      name: name,
      metacode_id: metacode.id,
      permission: permission || Metamaps.Active.Map.get('permission'),
      desc: desc,
      link: link
    });
    Metamaps.Topics.add(topic);
    self.cidMappings[import_id] = topic.cid;

    var mapping = new Metamaps.Backbone.Mapping({
      xloc: xloc,
      yloc: yloc,
      mappable_id: topic.cid,
      mappable_type: "Topic"
    });
    Metamaps.Mappings.add(mapping);

    // this function also includes the creation of the topic in the database
    Metamaps.Topic.renderTopic(mapping, topic, true, true);
    Metamaps.Famous.viz.hideInstructions();
  },

  // Build a Synapse model + Mapping between two previously-imported topics
  // (looked up by import id via cidMappings) and render it.
  createSynapseWithParameters: function(description, category, permission,
                                        node1_id, node2_id) {
    var self = Metamaps.Import;
    var topic1 = Metamaps.Topics.get(self.cidMappings[node1_id]);
    var topic2 = Metamaps.Topics.get(self.cidMappings[node2_id]);
    // FIX: guard against synapse rows referencing topic ids that were never
    // imported — previously this threw on topic1.get('node')
    if (!topic1 || !topic2) {
      console.error("synapse references an unknown topic id; skipping");
      return;
    }
    var node1 = topic1.get('node');
    var node2 = topic2.get('node');

    var synapse = new Metamaps.Backbone.Synapse({
      desc: description,
      category: category,
      permission: permission,
      node1_id: node1.id,
      node2_id: node2.id
    });
    var mapping = new Metamaps.Backbone.Mapping({
      mappable_type: "Synapse",
      mappable_id: synapse.id
    });
    Metamaps.Synapse.renderSynapse(mapping, synapse, node1, node2, true);
  }
};

View file

@ -377,7 +377,7 @@ Metamaps.Backbone.init = function () {
mappable_id: this.isNew() ? this.cid : this.id
});
},
createEdge: function () {
createEdge: function (providedMapping) {
var mapping, mappingID;
var synapseID = this.isNew() ? this.cid : this.id;
@ -391,7 +391,7 @@ Metamaps.Backbone.init = function () {
};
if (Metamaps.Active.Map) {
mapping = this.getMapping();
mapping = providedMapping || this.getMapping();
mappingID = mapping.isNew() ? mapping.cid : mapping.id;
edge.data.$mappings = [];
edge.data.$mappingIDs = [mappingID];
@ -4614,7 +4614,7 @@ Metamaps.Synapse = {
var edgeOnViz;
var newedge = synapse.createEdge();
var newedge = synapse.createEdge(mapping);
Metamaps.Visualize.mGraph.graph.addAdjacence(node1, node2, newedge.data);
edgeOnViz = Metamaps.Visualize.mGraph.graph.getAdjacence(node1.id, node2.id);

View file

@ -85,11 +85,24 @@ class MapsController < ApplicationController
respond_with(@allmappers, @allmappings, @allsynapses, @alltopics, @allmessages, @map)
}
format.json { render json: @map }
format.csv { send_data @map.to_csv }
format.xls
format.csv { redirect_to action: :export, format: :csv }
format.xls { redirect_to action: :export, format: :xls }
end
end
# GET maps/:id/export
# Stream the requested map through MapExportService in the asked-for format.
def export
  exported_map = Map.find(params[:id])
  authorize exported_map
  service = MapExportService.new(current_user, exported_map)
  respond_to do |format|
    format.json { render json: service.json }
    format.csv { send_data service.csv }
    format.xls { @spreadsheet = service.xls }
  end
end
# GET maps/:id/contains
def contains
@map = Map.find(params[:id])

View file

@ -1,5 +1,5 @@
class MetacodesController < ApplicationController
before_action :require_admin, except: [:index]
before_action :require_admin, except: [:index, :show]
# GET /metacodes
# GET /metacodes.json
@ -18,6 +18,18 @@ class MetacodesController < ApplicationController
end
end
# GET /metacodes/1.json
# GET /metacodes/Action.json
# GET /metacodes/action.json
# Looks up a metacode by (case-insensitive) name when :name is given,
# otherwise by numeric id.
def show
  # FIX: `DOWNCASE(name)` is not a SQL function (it's LOWER), and
  # `downcase(params[:name])` is not valid Ruby (String#downcase is a
  # method) — the original raised NoMethodError whenever :name was present.
  @metacode = Metacode.where('LOWER(name) = ?', params[:name].downcase).first if params[:name]
  @metacode = Metacode.find(params[:id]) unless @metacode
  respond_to do |format|
    format.json { render json: @metacode }
  end
end
# GET /metacodes/new
# GET /metacodes/new.json
def new

View file

@ -84,24 +84,6 @@ class Map < ActiveRecord::Base
json
end
def to_csv(options = {})
CSV.generate(options) do |csv|
csv << ["id", "name", "metacode", "desc", "link", "user.name", "permission", "synapses"]
self.topics.each do |topic|
csv << [
topic.id,
topic.name,
topic.metacode.name,
topic.desc,
topic.link,
topic.user.name,
topic.permission,
topic.synapses_csv("text")
]
end
end
end
def decode_base64(imgBase64)
decoded_data = Base64.decode64(imgBase64)

View file

@ -31,6 +31,10 @@ class MapPolicy < ApplicationPolicy
record.permission == 'commons' || record.permission == 'public' || record.user == user
end
# Exporting reveals the same data as viewing the map, so reuse show?'s rule.
def export?
show?
end
# Listing a map's contents is equivalent to viewing it; delegate to show?.
def contains?
show?
end

View file

@ -0,0 +1,84 @@
# Builds JSON / CSV / spreadsheet exports of a map, restricted to the
# topics and synapses the given user is allowed to see (via Pundit scopes).
class MapExportService < Struct.new(:user, :map)
  # Hash with :topics and :synapses arrays of plain Hashes, ready for render json:.
  def json
    # marshal_dump turns OpenStruct into a Hash
    {
      topics: exportable_topics.map(&:marshal_dump),
      synapses: exportable_synapses.map(&:marshal_dump)
    }
  end

  # CSV text of the combined topic + synapse tables; options forwarded to CSV.generate.
  def csv(options = {})
    CSV.generate(options) do |csv|
      to_spreadsheet.each do |line|
        csv << line
      end
    end
  end

  # Array-of-rows form, rendered by the xls view as an HTML table.
  def xls
    to_spreadsheet
  end

  private

  def topic_headings
    [:id, :name, :metacode, :x, :y, :description, :link, :user, :permission]
  end

  def synapse_headings
    [:topic1, :topic2, :category, :description, :user, :permission]
  end

  # OpenStructs for each topic mapping visible to user, including map position.
  def exportable_topics
    # FIX: was `visible_topics ||=` — a conditional assignment on a fresh
    # local never memoizes and just obscures intent; plain `=` matches
    # exportable_synapses below
    visible_topics = Pundit.policy_scope!(user, map.topics)
    topic_mappings = Mapping.includes(mappable: [:metacode, :user])
                            .where(mappable: visible_topics, map: map)
    topic_mappings.map do |mapping|
      topic = mapping.mappable
      OpenStruct.new(
        id: topic.id,
        name: topic.name,
        metacode: topic.metacode.name,
        x: mapping.xloc,
        y: mapping.yloc,
        description: topic.desc,
        link: topic.link,
        user: topic.user.name,
        permission: topic.permission
      )
    end
  end

  # OpenStructs for each synapse visible to user.
  def exportable_synapses
    visible_synapses = Pundit.policy_scope!(user, map.synapses)
    visible_synapses.map do |synapse|
      OpenStruct.new(
        topic1: synapse.node1_id,
        topic2: synapse.node2_id,
        category: synapse.category,
        description: synapse.desc,
        user: synapse.user.name,
        permission: synapse.permission
      )
    end
  end

  # Flatten both exports into one array of rows:
  # ["Topics"], headings, topic rows, blank, ["Synapses"], headings, synapse rows.
  def to_spreadsheet
    spreadsheet = []
    spreadsheet << ["Topics"]
    spreadsheet << topic_headings.map(&:capitalize)
    exportable_topics.each do |topic| # FIX: was |topics| — one topic per iteration
      # convert exportable_topics into an array of arrays
      spreadsheet << topic_headings.map { |h| topic.send(h) }
    end
    spreadsheet << []
    spreadsheet << ["Synapses"]
    spreadsheet << synapse_headings.map(&:capitalize)
    exportable_synapses.each do |synapse|
      # convert exportable_synapses into an array of arrays
      spreadsheet << synapse_headings.map { |h| synapse.send(h) }
    end
    spreadsheet
  end
end

View file

@ -0,0 +1,9 @@
<%# @spreadsheet is an array of row arrays (MapExportService#xls); render it
    as a plain HTML table — presumably consumed by the .xls format, which
    spreadsheet apps can open — NOTE(review): confirm content type set upstream %>
<table>
<% @spreadsheet.each do |line| %>
<tr>
<% line.each do |field| %>
<td><%= field %></td>
<% end %>
</tr>
<% end %>
</table>

View file

@ -1,26 +0,0 @@
<table>
<tr>
<th>ID</th>
<th>Name</th>
<th>Metacode</th>
<th>Description</th>
<th>Link</th>
<th>Username</th>
<th>Permission</th>
<th>Synapses</th>
</tr>
<% @map.topics.each do |topic| %>
<tr>
<td><%= topic.id %></td>
<td><%= topic.name %></td>
<td><%= topic.metacode.name %></td>
<td><%= topic.desc %></td>
<td><%= topic.link %></td>
<td><%= topic.user.name %></td>
<td><%= topic.permission %></td>
<% topic.synapses_csv.each do |s_text| %>
<td><%= s_text %></td>
<% end %>
</tr>
<% end %>
</table>

View file

@ -23,7 +23,10 @@ Metamaps::Application.routes.draw do
resources :messages, only: [:show, :create, :update, :destroy]
resources :mappings, except: [:index, :new, :edit]
resources :metacode_sets, :except => [:show]
resources :metacodes, :except => [:show, :destroy]
resources :metacodes, :except => [:destroy]
get 'metacodes/:name', to: 'metacodes#show'
resources :synapses, except: [:index, :new, :edit]
resources :topics, except: [:index, :new, :edit] do
get :autocomplete_topic, :on => :collection
@ -33,6 +36,8 @@ Metamaps::Application.routes.draw do
get 'topics/:id/relatives', to: 'topics#relatives', as: :relatives
resources :maps, except: [:index, :new, :edit]
get 'maps/:id/export', to: 'maps#export'
get 'explore/active', to: 'maps#activemaps'
get 'explore/featured', to: 'maps#featuredmaps'
get 'explore/mine', to: 'maps#mymaps'