
Auto merge of #46073 - GuillaumeGomez:rollup, r=GuillaumeGomez

Rollup of 4 pull requests

- Successful merges: #45767, #46044, #46066, #46071
- Failed merges:
bors 2017-11-18 11:38:06 +00:00
commit 18250b0349
17 changed files with 115 additions and 107 deletions


@@ -38,17 +38,19 @@
 Forcing you to write `main` for every example, no matter how small,
 adds friction. So `rustdoc` processes your examples slightly before
 running them. Here's the full algorithm rustdoc uses to preprocess examples:

-1. Any leading `#![foo]` attributes are left intact as crate attributes.
-2. Some common `allow` attributes are inserted, including
+1. Some common `allow` attributes are inserted, including
    `unused_variables`, `unused_assignments`, `unused_mut`,
    `unused_attributes`, and `dead_code`. Small examples often trigger
    these lints.
-3. If the example does not contain `extern crate`, then `extern crate
+2. Any attributes specified with `#![doc(test(attr(...)))]` are added.
+3. Any leading `#![foo]` attributes are left intact as crate attributes.
+4. If the example does not contain `extern crate`, and
+   `#![doc(test(no_crate_inject))]` was not specified, then `extern crate
    <mycrate>;` is inserted (note the lack of `#[macro_use]`).
-4. Finally, if the example does not contain `fn main`, the remainder of the
+5. Finally, if the example does not contain `fn main`, the remainder of the
    text is wrapped in `fn main() { your_code }`.

-For more about that caveat in rule 3, see "Documenting Macros" below.
+For more about that caveat in rule 4, see "Documenting Macros" below.

 ## Hiding portions of the example
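For context, here is roughly what that preprocessing does to a small doctest. This is an illustrative sketch, not output from rustdoc itself; `mycrate` is a hypothetical crate name, and step 3 (leaving any leading `#![foo]` attributes intact) is not shown because the example has none:

```rust,ignore
// As written in the docs:
//
//     let x = 5;
//     assert_eq!(x, 5);
//
// Roughly what rustdoc compiles after preprocessing:
#![allow(unused_variables, unused_assignments, unused_mut,
         unused_attributes, dead_code)]   // step 1: common `allow`s
// step 2 would add any `#![doc(test(attr(...)))]` attributes here
extern crate mycrate;                     // step 4: crate injection

fn main() {                               // step 5: `fn main` wrapping
    let x = 5;
    assert_eq!(x, 5);
}
```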


@@ -103,6 +103,26 @@ to it in the docs. But if you include this:
 it will not.

+### `test(no_crate_inject)`
+
+By default, `rustdoc` will automatically add a line with `extern crate my_crate;` into each doctest.
+But if you include this:
+
+```rust,ignore
+#![doc(test(no_crate_inject))]
+```
+
+it will not.
+
+### `test(attr(...))`
+
+This form of the `doc` attribute allows you to add arbitrary attributes to all your doctests. For
+example, if you want your doctests to fail if they produce any warnings, you could add this:
+
+```rust,ignore
+#![doc(test(attr(deny(warnings))))]
+```
+
 ## At the item level

 These forms of the `#[doc]` attribute are used on individual items, to control how
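To make the effect of `test(attr(...))` concrete, here is a hypothetical item (not part of this patch) whose doctest compiles cleanly by default but fails once the crate root carries `#![doc(test(attr(deny(warnings))))]`. Note the triggered lint must be one that rustdoc's default `allow` list above does not cover; `unused_imports` qualifies:

```rust,ignore
/// ```
/// use std::collections::HashMap; // never used: an `unused_imports`
///                                // warning, promoted to a hard error
///                                // under `deny(warnings)`
/// ```
pub fn strict_docs_example() {}
```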


@@ -13,7 +13,7 @@ while True:
     if more_re.match(line):
         indent += 1
-        print "%03d %s%s" % (indent, " " * indent, line.strip())
+        print("%03d %s%s" % (indent, " " * indent, line.strip()))
     if less_re.match(line):
         indent -= 1


@@ -50,11 +50,11 @@ def block_trim(s):
         lns = lns[:-1]

     # remove leading horizontal whitespace
-    n = sys.maxint
+    n = sys.maxsize
     for ln in lns:
         if ln.strip():
             n = min(n, len(re.search('^\s*', ln).group()))
-    if n != sys.maxint:
+    if n != sys.maxsize:
         lns = [ln[n:] for ln in lns]

     # strip trailing whitespace


@@ -97,11 +97,15 @@ from collections import namedtuple
 from subprocess import Popen, check_call, PIPE
 from glob import glob
 import multiprocessing
-import Queue
 import threading
 import ctypes
 import binascii

+try:  # Python 3
+    import queue as Queue
+except ImportError:  # Python 2
+    import Queue
+
 NUM_WORKERS = 2
 UPDATE_EVERY_N = 50000
 INF = namedtuple('INF', '')()


@ -82,9 +82,6 @@ pub struct Mir<'tcx> {
/// in scope, but a separate set of locals.
pub promoted: IndexVec<Promoted, Mir<'tcx>>,
/// Return type of the function.
pub return_ty: Ty<'tcx>,
/// Yield type of the function, if it is a generator.
pub yield_ty: Option<Ty<'tcx>>,
@ -135,7 +132,6 @@ impl<'tcx> Mir<'tcx> {
visibility_scope_info: ClearOnDecode<IndexVec<VisibilityScope,
VisibilityScopeInfo>>,
promoted: IndexVec<Promoted, Mir<'tcx>>,
return_ty: Ty<'tcx>,
yield_ty: Option<Ty<'tcx>>,
local_decls: IndexVec<Local, LocalDecl<'tcx>>,
arg_count: usize,
@ -145,14 +141,12 @@ impl<'tcx> Mir<'tcx> {
// We need `arg_count` locals, and one for the return pointer
assert!(local_decls.len() >= arg_count + 1,
"expected at least {} locals, got {}", arg_count + 1, local_decls.len());
assert_eq!(local_decls[RETURN_POINTER].ty, return_ty);
Mir {
basic_blocks,
visibility_scopes,
visibility_scope_info,
promoted,
return_ty,
yield_ty,
generator_drop: None,
generator_layout: None,
@ -273,6 +267,11 @@ impl<'tcx> Mir<'tcx> {
&block.terminator().source_info
}
}
/// Return the return type, it always return first element from `local_decls` array
pub fn return_ty(&self) -> Ty<'tcx> {
self.local_decls[RETURN_POINTER].ty
}
}
#[derive(Clone, Debug)]
@ -299,7 +298,6 @@ impl_stable_hash_for!(struct Mir<'tcx> {
visibility_scopes,
visibility_scope_info,
promoted,
return_ty,
yield_ty,
generator_drop,
generator_layout,
@ -1744,7 +1742,6 @@ impl<'tcx> TypeFoldable<'tcx> for Mir<'tcx> {
visibility_scopes: self.visibility_scopes.clone(),
visibility_scope_info: self.visibility_scope_info.clone(),
promoted: self.promoted.fold_with(folder),
return_ty: self.return_ty.fold_with(folder),
yield_ty: self.yield_ty.fold_with(folder),
generator_drop: self.generator_drop.fold_with(folder),
generator_layout: self.generator_layout.fold_with(folder),
@ -1763,7 +1760,6 @@ impl<'tcx> TypeFoldable<'tcx> for Mir<'tcx> {
self.generator_layout.visit_with(visitor) ||
self.yield_ty.visit_with(visitor) ||
self.promoted.visit_with(visitor) ||
self.return_ty.visit_with(visitor) ||
self.local_decls.visit_with(visitor)
}
}
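This change is a classic de-duplication refactor: the stored `return_ty` field duplicated information already present in `local_decls[RETURN_POINTER].ty` and could drift out of sync (the old constructor even asserted that the two matched), so the field is dropped and the value derived on demand. Here is a minimal self-contained sketch of the same pattern, with invented names standing in for the compiler's types:

```rust
#[derive(Clone, Debug)]
struct LocalDecl {
    ty: String, // stand-in for the compiler's `Ty<'tcx>`
}

struct Body {
    // By convention local 0 is the return place, mirroring `RETURN_POINTER`.
    local_decls: Vec<LocalDecl>,
}

impl Body {
    fn new(local_decls: Vec<LocalDecl>) -> Body {
        assert!(!local_decls.is_empty(), "need at least the return place");
        // No separate `return_ty` field to keep in sync, so the
        // constructor can no longer be handed an inconsistent return type.
        Body { local_decls }
    }

    /// Derive the return type from the single source of truth.
    fn return_ty(&self) -> &str {
        &self.local_decls[0].ty
    }
}

fn main() {
    let body = Body::new(vec![LocalDecl { ty: "i32".to_string() }]);
    assert_eq!(body.return_ty(), "i32");
}
```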


@@ -292,7 +292,7 @@ macro_rules! make_mir_visitor {
                 self.visit_visibility_scope_data(scope);
             }

-            self.visit_ty(&$($mutability)* mir.return_ty, TyContext::ReturnTy(SourceInfo {
+            self.visit_ty(&$($mutability)* mir.return_ty(), TyContext::ReturnTy(SourceInfo {
                 span: mir.span,
                 scope: ARGUMENT_VISIBILITY_SCOPE,
             }));


@@ -444,7 +444,7 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
         }).collect()
     });

-    let mut mir = builder.finish(upvar_decls, return_ty, yield_ty);
+    let mut mir = builder.finish(upvar_decls, yield_ty);
     mir.spread_arg = spread_arg;
     mir
 }

@@ -469,7 +469,7 @@ fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
     // Constants can't `return` so a return block should not be created.
     assert_eq!(builder.cached_return_block, None);

-    builder.finish(vec![], ty, None)
+    builder.finish(vec![], None)
 }

 fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
@@ -481,7 +481,7 @@ fn construct_error<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
     let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty);
     let source_info = builder.source_info(span);
     builder.cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable);
-    builder.finish(vec![], ty, None)
+    builder.finish(vec![], None)
 }

 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
@@ -524,7 +524,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     fn finish(self,
               upvar_decls: Vec<UpvarDecl>,
-              return_ty: Ty<'tcx>,
               yield_ty: Option<Ty<'tcx>>)
               -> Mir<'tcx> {
         for (index, block) in self.cfg.basic_blocks.iter().enumerate() {

@@ -537,7 +536,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
             self.visibility_scopes,
             ClearOnDecode::Set(self.visibility_scope_info),
             IndexVec::new(),
-            return_ty,
             yield_ty,
             self.local_decls,
             self.arg_count,


@@ -197,7 +197,6 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         ),
         ClearOnDecode::Clear,
         IndexVec::new(),
-        sig.output(),
         None,
         local_decls_for_sig(&sig, span),
         sig.inputs().len(),

@@ -345,7 +344,6 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> {
             ),
             ClearOnDecode::Clear,
             IndexVec::new(),
-            self.sig.output(),
             None,
             self.local_decls,
             self.sig.inputs().len(),

@@ -808,7 +806,6 @@ fn build_call_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         ),
         ClearOnDecode::Clear,
         IndexVec::new(),
-        sig.output(),
         None,
         local_decls,
         sig.inputs().len(),

@@ -881,7 +878,6 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
         ),
         ClearOnDecode::Clear,
         IndexVec::new(),
-        sig.output(),
         None,
         local_decls,
         sig.inputs().len(),


@@ -557,7 +557,6 @@ fn create_generator_drop_shim<'a, 'tcx>(
     }

     // Replace the return variable
-    mir.return_ty = tcx.mk_nil();
     mir.local_decls[RETURN_POINTER] = LocalDecl {
         mutability: Mutability::Mut,
         ty: tcx.mk_nil(),

@@ -777,7 +776,7 @@ impl MirPass for StateTransform {
         let state_did = tcx.lang_items().gen_state().unwrap();
         let state_adt_ref = tcx.adt_def(state_did);
         let state_substs = tcx.mk_substs([Kind::from(yield_ty),
-                                          Kind::from(mir.return_ty)].iter());
+                                          Kind::from(mir.return_ty())].iter());
         let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

         // We rename RETURN_POINTER which has type mir.return_ty to new_ret_local
@@ -808,7 +807,6 @@ impl MirPass for StateTransform {
         transform.visit_mir(mir);

         // Update our MIR struct to reflect the changes we've made
-        mir.return_ty = ret_ty;
         mir.yield_ty = None;
         mir.arg_count = 1;
         mir.spread_arg = None;


@@ -287,7 +287,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
         let span = self.promoted.span;
         let new_operand = Operand::Constant(box Constant {
             span,
-            ty: self.promoted.return_ty,
+            ty: self.promoted.return_ty(),
             literal: Literal::Promoted {
                 index: Promoted::new(self.source.promoted.len())
             }

@@ -385,7 +385,6 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
             mir.visibility_scopes.clone(),
             mir.visibility_scope_info.clone(),
             IndexVec::new(),
-            ty,
             None,
             initial_locals,
             0,


@@ -380,7 +380,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> {
             // conservative type qualification instead.
             if self.qualif.intersects(Qualif::CONST_ERROR) {
                 self.qualif = Qualif::empty();
-                let return_ty = mir.return_ty;
+                let return_ty = mir.return_ty();
                 self.add_type(return_ty);
             }

@@ -938,7 +938,7 @@ fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // performing the steal.
     let mir = &tcx.mir_const(def_id).borrow();

-    if mir.return_ty.references_error() {
+    if mir.return_ty().references_error() {
         tcx.sess.delay_span_bug(mir.span, "mir_const_qualif: Mir had errors");
         return (Qualif::NOT_CONST.bits(), Rc::new(IdxSetBuf::new_empty(0)));
     }

@@ -956,7 +956,7 @@ impl MirPass for QualifyAndPromoteConstants {
               src: MirSource,
               mir: &mut Mir<'tcx>) {
         // There's not really any point in promoting errorful MIR.
-        if mir.return_ty.references_error() {
+        if mir.return_ty().references_error() {
             tcx.sess.delay_span_bug(mir.span, "QualifyAndPromoteConstants: Mir had errors");
             return;
         }

@@ -1045,7 +1045,7 @@ impl MirPass for QualifyAndPromoteConstants {
                     return;
                 }
             }

-            let ty = mir.return_ty;
+            let ty = mir.return_ty();
            tcx.infer_ctxt().enter(|infcx| {
                let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
                let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic);


@@ -130,7 +130,7 @@ impl<'a, 'b, 'gcx, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'gcx, 'tcx> {
     }

     fn visit_mir(&mut self, mir: &Mir<'tcx>) {
-        self.sanitize_type(&"return type", mir.return_ty);
+        self.sanitize_type(&"return type", mir.return_ty());
         for local_decl in &mir.local_decls {
             self.sanitize_type(local_decl, local_decl.ty);
         }


@@ -150,7 +150,7 @@ fn write_graph_label<'a, 'gcx, 'tcx, W: Write>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
         write!(w, "{:?}: {}", Lvalue::Local(arg), escape(&mir.local_decls[arg].ty))?;
     }

-    write!(w, ") -&gt; {}", escape(mir.return_ty))?;
+    write!(w, ") -&gt; {}", escape(mir.return_ty()))?;
     write!(w, r#"<br align="left"/>"#)?;

     for local in mir.vars_and_temps_iter() {


@@ -392,13 +392,13 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
                 write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?;
             }

-            write!(w, ") -> {}", mir.return_ty)
+            write!(w, ") -> {}", mir.return_ty())
         }
         (hir::BodyOwnerKind::Const, _) |
         (hir::BodyOwnerKind::Static(_), _) |
         (_, Some(_)) => {
             assert_eq!(mir.arg_count, 0);
-            write!(w, ": {} =", mir.return_ty)
+            write!(w, ": {} =", mir.return_ty())
         }
     }
 }


@@ -382,13 +382,6 @@
         }
     }

-    function min(a, b) {
-        if (a < b) {
-            return a;
-        }
-        return b;
-    }
-
     function extractGenerics(val) {
         val = val.toLowerCase();
         if (val.indexOf('<') !== -1) {

@@ -426,7 +419,7 @@
            }
            if (lev.pos !== -1) {
                elems.splice(lev.pos, 1);
-                lev_distance = min(lev.lev, lev_distance);
+                lev_distance = Math.min(lev.lev, lev_distance);
            } else {
                return MAX_LEV_DISTANCE + 1;
            }

@@ -489,11 +482,12 @@
            var new_lev = levenshtein(obj.name, val.name);
            if (new_lev < lev_distance) {
                if ((lev = checkGenerics(obj, val)) <= MAX_LEV_DISTANCE) {
-                    lev_distance = min(min(new_lev, lev), lev_distance);
+                    lev_distance = Math.min(Math.min(new_lev, lev), lev_distance);
                }
            } else if (obj.generics && obj.generics.length > 0) {
                for (var x = 0; x < obj.generics.length; ++x) {
-                    lev_distance = min(levenshtein(obj.generics[x], val.name), lev_distance);
+                    lev_distance = Math.min(levenshtein(obj.generics[x], val.name),
+                                            lev_distance);
                }
            }
            // Now whatever happens, the returned distance is "less good" so we should mark it

@@ -510,7 +504,7 @@
                if (literalSearch === true && tmp === true) {
                    return true;
                }
-                lev_distance = min(tmp, lev_distance);
+                lev_distance = Math.min(tmp, lev_distance);
                if (lev_distance === 0) {
                    return 0;
                }

@@ -527,7 +521,7 @@
                if (literalSearch === true && tmp === true) {
                    return true;
                }
-                lev_distance = min(tmp, lev_distance);
+                lev_distance = Math.min(tmp, lev_distance);
                if (lev_distance === 0) {
                    return 0;
                }

@@ -568,18 +562,20 @@
                var in_args = findArg(searchIndex[i], val, true);
                var returned = checkReturned(searchIndex[i], val, true);
                var ty = searchIndex[i];
+                var fullId = itemTypes[ty.ty] + ty.path + ty.name;

                if (searchWords[i] === val.name) {
                    // filter type: ... queries
                    if (typePassesFilter(typeFilter, searchIndex[i].ty) &&
-                        results[ty.path + ty.name] === undefined)
+                        results[fullId] === undefined)
                    {
-                        results[ty.path + ty.name] = {id: i, index: -1};
+                        results[fullId] = {id: i, index: -1};
                        results_length += 1;
                    }
                } else if ((in_args === true || returned === true) &&
                           typePassesFilter(typeFilter, searchIndex[i].ty)) {
-                    if (results[ty.path + ty.name] === undefined) {
-                        results[ty.path + ty.name] = {
+                    if (results[fullId] === undefined) {
+                        results[fullId] = {
                            id: i,
                            index: -1,
                            dontValidate: true,

@@ -589,10 +585,10 @@
                        results_length += 1;
                    } else {
                        if (in_args === true) {
-                            results[ty.path + ty.name].in_args = true;
+                            results[fullId].in_args = true;
                        }
                        if (returned === true) {
-                            results[ty.path + ty.name].returned = true;
+                            results[fullId].returned = true;
                        }
                    }
                }

@@ -621,6 +617,7 @@
                if (!type) {
                    continue;
                }
+                var fullId = itemTypes[ty.ty] + ty.path + ty.name;

                // allow searching for void (no output) functions as well
                var typeOutput = type.output ? type.output.name : "";

@@ -639,15 +636,15 @@
                    in_args = allFound;
                }
                if (in_args === true || returned === true || module === true) {
-                    if (results[ty.path + ty.name] !== undefined) {
+                    if (results[fullId] !== undefined) {
                        if (returned === true) {
-                            results[ty.path + ty.name].returned = true;
+                            results[fullId].returned = true;
                        }
                        if (in_args === true) {
-                            results[ty.path + ty.name].in_args = true;
+                            results[fullId].in_args = true;
                        }
                    } else {
-                        results[ty.path + ty.name] = {
+                        results[fullId] = {
                            id: i,
                            index: -1,
                            dontValidate: true,

@@ -682,48 +679,49 @@
                var index = -1;
                // we want lev results to go lower than others
                var lev = MAX_LEV_DISTANCE;
+                var fullId = itemTypes[ty.ty] + ty.path + ty.name;

                if (searchWords[j].indexOf(split[i]) > -1 ||
                    searchWords[j].indexOf(val) > -1 ||
                    searchWords[j].replace(/_/g, "").indexOf(val) > -1)
                {
                    // filter type: ... queries
-                    if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                        results[ty.path + ty.name] === undefined) {
+                    if (typePassesFilter(typeFilter, ty) &&
+                        results[fullId] === undefined) {
                        index = searchWords[j].replace(/_/g, "").indexOf(val);
                    }
                }
                if ((lev_distance = levenshtein(searchWords[j], val)) <= MAX_LEV_DISTANCE) {
-                    if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                        (results[ty.path + ty.name] === undefined ||
-                         results[ty.path + ty.name].lev > lev_distance)) {
-                        lev = min(lev, lev_distance);
-                        index = 0;
+                    if (typePassesFilter(typeFilter, ty) &&
+                        (results[fullId] === undefined ||
+                         results[fullId].lev > lev_distance)) {
+                        lev = Math.min(lev, lev_distance);
+                        index = Math.max(0, index);
                    }
                }
                if ((lev_distance = findArg(searchIndex[j], valGenerics))
                    <= MAX_LEV_DISTANCE) {
-                    if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                        (results[ty.path + ty.name] === undefined ||
-                         results[ty.path + ty.name].lev > lev_distance)) {
+                    if (typePassesFilter(typeFilter, ty) &&
+                        (results[fullId] === undefined ||
+                         results[fullId].lev > lev_distance)) {
                        in_args = true;
-                        lev = min(lev_distance, lev);
-                        index = 0;
+                        lev = Math.min(lev_distance, lev);
+                        index = Math.max(0, index);
                    }
                }
                if ((lev_distance = checkReturned(searchIndex[j], valGenerics)) <=
                    MAX_LEV_DISTANCE) {
-                    if (typePassesFilter(typeFilter, searchIndex[j].ty) &&
-                        (results[ty.path + ty.name] === undefined ||
-                         results[ty.path + ty.name].lev > lev_distance)) {
+                    if (typePassesFilter(typeFilter, ty) &&
+                        (results[fullId] === undefined ||
+                         results[fullId].lev > lev_distance)) {
                        returned = true;
-                        lev = min(lev_distance, lev);
-                        index = 0;
+                        lev = Math.min(lev_distance, lev);
+                        index = Math.max(0, index);
                    }
                }
                if (index !== -1) {
-                    if (results[ty.path + ty.name] === undefined) {
-                        results[ty.path + ty.name] = {
+                    if (results[fullId] === undefined) {
+                        results[fullId] = {
                            id: j,
                            index: index,
                            lev: lev,

@@ -732,14 +730,14 @@
                        };
                        results_length += 1;
                    } else {
-                        if (results[ty.path + ty.name].lev > lev) {
-                            results[ty.path + ty.name].lev = lev;
+                        if (results[fullId].lev > lev) {
+                            results[fullId].lev = lev;
                        }
                        if (in_args === true) {
-                            results[ty.path + ty.name].in_args = true;
+                            results[fullId].in_args = true;
                        }
                        if (returned === true) {
-                            results[ty.path + ty.name].returned = true;
+                            results[fullId].returned = true;
                        }
                    }
                }
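The search change above swaps the deduplication key for results from `path + name` to `itemType + path + name` (the new `fullId`), so two items that share a path and name but have different kinds no longer overwrite each other. The same keying idea, transposed to Rust purely for illustration (the real code is the JavaScript above; all names here are invented):

```rust
use std::collections::HashMap;

// Stand-in for rustdoc's item-type discriminant.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum ItemType {
    Struct,
    Macro,
}

fn main() {
    // Two distinct items sharing a path and a name.
    let items = [
        (ItemType::Struct, "mycrate", "Thing"),
        (ItemType::Macro, "mycrate", "Thing"),
    ];

    // Old scheme: key = path + name. The second insert overwrites the
    // first, so one search result silently disappears.
    let mut by_path_name: HashMap<String, usize> = HashMap::new();
    // New scheme: key includes the item kind, like `fullId`.
    let mut by_full_id: HashMap<(ItemType, &str, &str), usize> = HashMap::new();

    for (id, &(kind, path, name)) in items.iter().enumerate() {
        by_path_name.insert(format!("{}{}", path, name), id);
        by_full_id.insert((kind, path, name), id);
    }

    assert_eq!(by_path_name.len(), 1); // collision: one entry lost
    assert_eq!(by_full_id.len(), 2);   // both items kept
}
```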


@@ -89,7 +89,7 @@ def load_unicode_data(f):
         if is_surrogate(cp):
             continue
         if range_start >= 0:
-            for i in xrange(range_start, cp):
+            for i in range(range_start, cp):
                 udict[i] = data
             range_start = -1
         if data[1].endswith(", First>"):

@@ -382,7 +382,7 @@ def compute_trie(rawdata, chunksize):
     root = []
     childmap = {}
     child_data = []
-    for i in range(len(rawdata) / chunksize):
+    for i in range(len(rawdata) // chunksize):
         data = rawdata[i * chunksize: (i + 1) * chunksize]
         child = '|'.join(map(str, data))
         if child not in childmap:

@@ -400,7 +400,7 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
     # convert to bitmap chunks of 64 bits each
     chunks = []
-    for i in range(0x110000 / CHUNK):
+    for i in range(0x110000 // CHUNK):
         chunk = 0
         for j in range(64):
             if rawdata[i * 64 + j]:

@@ -412,12 +412,12 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
         pub_string = "pub "
     f.write(" %sconst %s: &'static super::BoolTrie = &super::BoolTrie {\n" % (pub_string, name))
     f.write(" r1: [\n")
-    data = ','.join('0x%016x' % chunk for chunk in chunks[0:0x800 / CHUNK])
+    data = ','.join('0x%016x' % chunk for chunk in chunks[0:0x800 // CHUNK])
     format_table_content(f, data, 12)
     f.write("\n ],\n")

     # 0x800..0x10000 trie
-    (r2, r3) = compute_trie(chunks[0x800 / CHUNK : 0x10000 / CHUNK], 64 / CHUNK)
+    (r2, r3) = compute_trie(chunks[0x800 // CHUNK : 0x10000 // CHUNK], 64 // CHUNK)
     f.write(" r2: [\n")
     data = ','.join(str(node) for node in r2)
     format_table_content(f, data, 12)

@@ -428,7 +428,7 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
     f.write("\n ],\n")

     # 0x10000..0x110000 trie
-    (mid, r6) = compute_trie(chunks[0x10000 / CHUNK : 0x110000 / CHUNK], 64 / CHUNK)
+    (mid, r6) = compute_trie(chunks[0x10000 // CHUNK : 0x110000 // CHUNK], 64 // CHUNK)
     (r4, r5) = compute_trie(mid, 64)
     f.write(" r4: [\n")
     data = ','.join(str(node) for node in r4)

@@ -446,14 +446,14 @@ def emit_bool_trie(f, name, t_data, is_pub=True):
     f.write(" };\n\n")

 def emit_small_bool_trie(f, name, t_data, is_pub=True):
-    last_chunk = max(int(hi / 64) for (lo, hi) in t_data)
+    last_chunk = max(hi // 64 for (lo, hi) in t_data)
     n_chunks = last_chunk + 1
     chunks = [0] * n_chunks
     for (lo, hi) in t_data:
         for cp in range(lo, hi + 1):
-            if int(cp / 64) >= len(chunks):
-                print(cp, int(cp / 64), len(chunks), lo, hi)
-            chunks[int(cp / 64)] |= 1 << (cp & 63)
+            if cp // 64 >= len(chunks):
+                print(cp, cp // 64, len(chunks), lo, hi)
+            chunks[cp // 64] |= 1 << (cp & 63)

     pub_string = ""
     if is_pub:

@@ -519,32 +519,29 @@ def emit_conversions_module(f, to_upper, to_lower, to_title):
     pfun = lambda x: "(%s,[%s,%s,%s])" % (
         escape_char(x[0]), escape_char(x[1][0]), escape_char(x[1][1]), escape_char(x[1][2]))
     emit_table(f, "to_lowercase_table",
-               sorted(to_lower.iteritems(), key=operator.itemgetter(0)),
+               sorted(to_lower.items(), key=operator.itemgetter(0)),
               is_pub=False, t_type = t_type, pfun=pfun)
     emit_table(f, "to_uppercase_table",
-               sorted(to_upper.iteritems(), key=operator.itemgetter(0)),
+               sorted(to_upper.items(), key=operator.itemgetter(0)),
               is_pub=False, t_type = t_type, pfun=pfun)
     f.write("}\n\n")

 def emit_norm_module(f, canon, compat, combine, norm_props):
-    canon_keys = canon.keys()
-    canon_keys.sort()
+    canon_keys = sorted(canon.keys())

-    compat_keys = compat.keys()
-    compat_keys.sort()
+    compat_keys = sorted(compat.keys())

     canon_comp = {}
     comp_exclusions = norm_props["Full_Composition_Exclusion"]
     for char in canon_keys:
-        if True in map(lambda (lo, hi): lo <= char <= hi, comp_exclusions):
+        if any(lo <= char <= hi for lo, hi in comp_exclusions):
             continue
         decomp = canon[char]
         if len(decomp) == 2:
-            if not canon_comp.has_key(decomp[0]):
+            if decomp[0] not in canon_comp:
                 canon_comp[decomp[0]] = []
             canon_comp[decomp[0]].append( (decomp[1], char) )
-    canon_comp_keys = canon_comp.keys()
-    canon_comp_keys.sort()
+    canon_comp_keys = sorted(canon_comp.keys())

 if __name__ == "__main__":
     r = "tables.rs"